From 7aafa5eda7f5879cc790b812d7031b3c132c6838 Mon Sep 17 00:00:00 2001
From: slguan
Date: Mon, 27 Apr 2020 12:04:10 +0800
Subject: [PATCH 01/15] [TD-184]

---
 src/mnode/src/mgmtBalance.c                 |  6 ++-
 src/vnode/src/vnodeMain.c                   |  2 +-
 tests/script/sh/deploy.sh                   |  1 +
 tests/script/tmp/prepare.sim                | 28 +++++++++-
 tests/script/unique/db/commit.sim           |  6 +--
 tests/script/unique/db/delete.sim           |  6 +--
 tests/script/unique/db/delete_part.sim      |  8 +--
 tests/script/unique/db/replica_add12.sim    | 60 +++++++++++++--------
 tests/script/unique/db/replica_add13.sim    |  8 +--
 tests/script/unique/db/replica_add23.sim    |  8 +--
 tests/script/unique/db/replica_part.sim     | 12 ++---
 tests/script/unique/db/replica_reduce21.sim |  6 +--
 tests/script/unique/db/replica_reduce31.sim |  6 +--
 tests/script/unique/db/replica_reduce32.sim |  6 +--
 14 files changed, 104 insertions(+), 59 deletions(-)

diff --git a/src/mnode/src/mgmtBalance.c b/src/mnode/src/mgmtBalance.c
index 7b85dc08e3..c6c10e0da0 100644
--- a/src/mnode/src/mgmtBalance.c
+++ b/src/mnode/src/mgmtBalance.c
@@ -17,6 +17,7 @@
 #include "os.h"
 #include "trpc.h"
 #include "tbalance.h"
+#include "tglobal.h"
 #include "mgmtDef.h"
 #include "mgmtLog.h"
 #include "mgmtMnode.h"
@@ -41,7 +42,10 @@ int32_t balanceAllocVnodes(SVgObj *pVgroup) {
     if (pDnode == NULL) break;

     if (pDnode->totalVnodes > 0 && pDnode->openVnodes < pDnode->totalVnodes) {
-      float usage = (float)pDnode->openVnodes / pDnode->totalVnodes;
+      float openVnodes = pDnode->openVnodes;
+      if (pDnode->isMgmt) openVnodes += tsMgmtEqualVnodeNum;
+
+      float usage = openVnodes / pDnode->totalVnodes;
       if (usage <= vnodeUsage) {
         pSelDnode = pDnode;
         vnodeUsage = usage;
diff --git a/src/vnode/src/vnodeMain.c b/src/vnode/src/vnodeMain.c
index 96470c4e3a..cac1030a93 100644
--- a/src/vnode/src/vnodeMain.c
+++ b/src/vnode/src/vnodeMain.c
@@ -286,7 +286,7 @@ void *vnodeGetVnode(int32_t vgId) {
   SVnodeObj **ppVnode = (SVnodeObj **)taosGetIntHashData(tsDnodeVnodesHash, vgId);
   if (ppVnode == NULL || *ppVnode == NULL) {
     terrno = TSDB_CODE_INVALID_VGROUP_ID;
-    dError("vgId:%d not exist", vgId);
+    dPrint("vgId:%d not exist", vgId);
     return NULL;
   }

diff --git a/tests/script/sh/deploy.sh b/tests/script/sh/deploy.sh
index 907ca6848b..e2618a78cb 100755
--- a/tests/script/sh/deploy.sh
+++ b/tests/script/sh/deploy.sh
@@ -101,6 +101,7 @@ echo "monitorDebugFlag 131" >> $TAOS_CFG
 echo "udebugFlag 131" >> $TAOS_CFG
 echo "jnidebugFlag 131" >> $TAOS_CFG
 echo "monitor 0" >> $TAOS_CFG
+echo "http 0" >> $TAOS_CFG
 echo "numOfThreadsPerCore 2.0" >> $TAOS_CFG
 echo "defaultPass taosdata" >> $TAOS_CFG
 echo "numOfLogLines 100000000" >> $TAOS_CFG
diff --git a/tests/script/tmp/prepare.sim b/tests/script/tmp/prepare.sim
index 731b707434..7890864360 100644
--- a/tests/script/tmp/prepare.sim
+++ b/tests/script/tmp/prepare.sim
@@ -1,4 +1,30 @@
 system sh/stop_dnodes.sh
+system sh/ip.sh -i 1 -s up
+system sh/ip.sh -i 2 -s up
+system sh/ip.sh -i 3 -s up
+system sh/ip.sh -i 4 -s up
+
 system sh/deploy.sh -n dnode1 -m 192.168.0.1 -i 192.168.0.1
 system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2
-system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3
\ No newline at end of file
+system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3
+system sh/deploy.sh -n dnode4 -m 192.168.0.1 -i 192.168.0.4
+
+system sh/cfg.sh -n dnode1 -c commitLog -v 2
+system sh/cfg.sh -n dnode2 -c commitLog -v 2
+system sh/cfg.sh -n dnode3 -c commitLog -v 2
+system sh/cfg.sh -n dnode4 -c commitLog -v 2
+
+system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1
+system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1
+system sh/cfg.sh -n dnode3 -c numOfMPeers -v 1
+system sh/cfg.sh -n dnode4 -c numOfMPeers -v 1
+
+system sh/cfg.sh -n dnode1 -c mgmtEqualVnodeNum -v 4
+system sh/cfg.sh -n dnode2 -c mgmtEqualVnodeNum -v 4
+system sh/cfg.sh -n dnode3 -c mgmtEqualVnodeNum -v 4
+system sh/cfg.sh -n dnode4 -c mgmtEqualVnodeNum -v 4
+
+system sh/cfg.sh -n dnode1 -c numOfTotalVnodes -v 4
+system sh/cfg.sh -n dnode2 -c numOfTotalVnodes -v 4
+system sh/cfg.sh -n dnode3 -c numOfTotalVnodes -v 4
+system sh/cfg.sh -n dnode4 -c numOfTotalVnodes -v 4
\ No newline at end of file
diff --git a/tests/script/unique/db/commit.sim b/tests/script/unique/db/commit.sim
index 7111574081..7f86f85c97 100644
--- a/tests/script/unique/db/commit.sim
+++ b/tests/script/unique/db/commit.sim
@@ -6,9 +6,9 @@ system sh/deploy.sh -n dnode1 -m 192.168.0.1 -i 192.168.0.1
 system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2
 system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3

-system sh/cfg.sh -n dnode1 -c commitLog -v 0
-system sh/cfg.sh -n dnode2 -c commitLog -v 0
-system sh/cfg.sh -n dnode3 -c commitLog -v 0
+system sh/cfg.sh -n dnode1 -c commitLog -v 2
+system sh/cfg.sh -n dnode2 -c commitLog -v 2
+system sh/cfg.sh -n dnode3 -c commitLog -v 2
 system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1
 system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1
 system sh/cfg.sh -n dnode3 -c numOfMPeers -v 1
diff --git a/tests/script/unique/db/delete.sim b/tests/script/unique/db/delete.sim
index ab44841522..9262239b04 100644
--- a/tests/script/unique/db/delete.sim
+++ b/tests/script/unique/db/delete.sim
@@ -6,9 +6,9 @@ system sh/deploy.sh -n dnode1 -m 192.168.0.1 -i 192.168.0.1
 system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2
 system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3

-system sh/cfg.sh -n dnode1 -c clog -v 0
-system sh/cfg.sh -n dnode2 -c clog -v 0
-system sh/cfg.sh -n dnode3 -c clog -v 0
+system sh/cfg.sh -n dnode1 -c clog -v 2
+system sh/cfg.sh -n dnode2 -c clog -v 2
+system sh/cfg.sh -n dnode3 -c clog -v 2
 system sh/cfg.sh -n dnode1 -c numOfMPeers -v 3
 system sh/cfg.sh -n dnode2 -c numOfMPeers -v 3
 system sh/cfg.sh -n dnode3 -c numOfMPeers -v 3
diff --git a/tests/script/unique/db/delete_part.sim b/tests/script/unique/db/delete_part.sim
index 0a03bc558c..a0a3356cd0 100644
--- a/tests/script/unique/db/delete_part.sim
+++ b/tests/script/unique/db/delete_part.sim
@@ -9,10 +9,10 @@ system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2
 system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3
 system sh/deploy.sh -n dnode4 -m 192.168.0.1 -i 192.168.0.4

-system sh/cfg.sh -n dnode1 -c commitLog -v 0
-system sh/cfg.sh -n dnode2 -c commitLog -v 0
-system sh/cfg.sh -n dnode3 -c commitLog -v 0
-system sh/cfg.sh -n dnode4 -c commitLog -v 0
+system sh/cfg.sh -n dnode1 -c clog -v 2
+system sh/cfg.sh -n dnode2 -c clog -v 2
+system sh/cfg.sh -n dnode3 -c clog -v 2
+system sh/cfg.sh -n dnode4 -c clog -v 2

 system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1
 system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1
diff --git a/tests/script/unique/db/replica_add12.sim b/tests/script/unique/db/replica_add12.sim
index 6ef47132c2..9f5adfeb06 100644
--- a/tests/script/unique/db/replica_add12.sim
+++ b/tests/script/unique/db/replica_add12.sim
@@ -9,10 +9,10 @@ system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2
 system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3
 system sh/deploy.sh -n dnode4 -m 192.168.0.1 -i 192.168.0.4

-system sh/cfg.sh -n dnode1 -c commitLog -v 1
-system sh/cfg.sh -n dnode2 -c commitLog -v 1
-system sh/cfg.sh -n
dnode3 -c commitLog -v 1 -system sh/cfg.sh -n dnode4 -c commitLog -v 1 +system sh/cfg.sh -n dnode1 -c clog -v 2 +system sh/cfg.sh -n dnode2 -c clog -v 2 +system sh/cfg.sh -n dnode3 -c clog -v 2 +system sh/cfg.sh -n dnode4 -c clog -v 2 system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1 system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1 @@ -30,12 +30,12 @@ system sh/cfg.sh -n dnode3 -c numOfTotalVnodes -v 4 system sh/cfg.sh -n dnode4 -c numOfTotalVnodes -v 4 print ========= start dnodes -system sh/exec.sh -n dnode1 -s start +system sh/exec_up.sh -n dnode1 -s start sql connect sql create dnode 192.168.0.2 -system sh/exec.sh -n dnode2 -s start +system sh/exec_up.sh -n dnode2 -s start sql create dnode 192.168.0.3 -system sh/exec.sh -n dnode3 -s start +system sh/exec_up.sh -n dnode3 -s start sleep 3000 print ======== step1 @@ -76,12 +76,29 @@ endi sleep 2000 +sql show dnodes +print dnode192.168.0.1 ==> openVnodes: $data3_1 +print dnode192.168.0.2 ==> openVnodes: $data3_2 +print dnode192.168.0.3 ==> openVnodes: $data3_3 + +if $data3_1 != 0 then + return -1 +endi + +if $data3_2 != 2 then + return -1 +endi + +if $data3_3 != 2 then + return -1 +endi + print ======== step2 sql alter database d1 replica 2 sql alter database d2 replica 2 sql alter database d3 replica 2 sql alter database d4 replica 2 -sleep 22000 +sleep 2000 print ======== step3 $x = 0 @@ -93,25 +110,22 @@ show3: endi sql show dnodes -print dnode192.168.0.1 ==> openVnodes: $data2_192.168.0.1 freeVnodes: $data3_192.168.0.1 -print dnode192.168.0.2 ==> openVnodes: $data2_192.168.0.2 freeVnodes: $data3_192.168.0.2 -print dnode192.168.0.3 ==> openVnodes: $data2_192.168.0.3 freeVnodes: $data3_192.168.0.3 +print dnode192.168.0.1 ==> openVnodes: $data3_1 +print dnode192.168.0.2 ==> openVnodes: $data3_2 +print dnode192.168.0.3 ==> openVnodes: $data3_3 -if $data2_192.168.0.1 != 0 then +if $data3_1 != 0 then goto show3 endi -if $data3_192.168.0.1 != 4 then +if $data3_2 != 4 then goto show3 endi -if $data3_192.168.0.2 != 0 then +if $data3_3 != 4 then goto show3 endi -if $data3_192.168.0.3 != 0 then - goto show3 -endi print ======== step4 sql insert into d1.t1 values(now, 2) @@ -139,9 +153,9 @@ if $rows != 2 then return -1 endi -sleep 20000 +sleep 10000 print ========= step5 -system sh/exec.sh -n dnode2 -s stop -x SIGINT +system sh/exec_up.sh -n dnode2 -s stop -x SIGINT sleep 5000 sql insert into d1.t1 values(now, 3) @@ -170,9 +184,9 @@ if $rows != 3 then endi print ========= step6 -system sh/exec.sh -n dnode2 -s start +system sh/exec_up.sh -n dnode2 -s start sleep 10000 -system sh/exec.sh -n dnode3 -s stop -x SIGINT +system sh/exec_up.sh -n dnode3 -s stop -x SIGINT sleep 10000 sql insert into d1.t1 values(now, 4) @@ -201,9 +215,9 @@ if $rows != 4 then endi print ========= step7 -system sh/exec.sh -n dnode3 -s start +system sh/exec_up.sh -n dnode3 -s start sleep 10000 -system sh/exec.sh -n dnode2 -s stop -x SIGINT +system sh/exec_up.sh -n dnode2 -s stop -x SIGINT sleep 10000 sql insert into d1.t1 values(now, 5) diff --git a/tests/script/unique/db/replica_add13.sim b/tests/script/unique/db/replica_add13.sim index ff8e73e97a..fd5515f1a8 100644 --- a/tests/script/unique/db/replica_add13.sim +++ b/tests/script/unique/db/replica_add13.sim @@ -9,10 +9,10 @@ system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2 system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3 system sh/deploy.sh -n dnode4 -m 192.168.0.1 -i 192.168.0.4 -system sh/cfg.sh -n dnode1 -c commitLog -v 1 -system sh/cfg.sh -n dnode2 -c commitLog -v 1 -system sh/cfg.sh -n dnode3 -c 
commitLog -v 1 -system sh/cfg.sh -n dnode4 -c commitLog -v 1 +system sh/cfg.sh -n dnode1 -c clog -v 2 +system sh/cfg.sh -n dnode2 -c clog -v 2 +system sh/cfg.sh -n dnode3 -c clog -v 2 +system sh/cfg.sh -n dnode4 -c clog -v 2 system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1 system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1 diff --git a/tests/script/unique/db/replica_add23.sim b/tests/script/unique/db/replica_add23.sim index d839efa82b..9aea2e5a4a 100644 --- a/tests/script/unique/db/replica_add23.sim +++ b/tests/script/unique/db/replica_add23.sim @@ -9,10 +9,10 @@ system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2 system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3 system sh/deploy.sh -n dnode4 -m 192.168.0.1 -i 192.168.0.4 -system sh/cfg.sh -n dnode1 -c commitLog -v 1 -system sh/cfg.sh -n dnode2 -c commitLog -v 1 -system sh/cfg.sh -n dnode3 -c commitLog -v 1 -system sh/cfg.sh -n dnode4 -c commitLog -v 1 +system sh/cfg.sh -n dnode1 -c clog -v 2 +system sh/cfg.sh -n dnode2 -c clog -v 2 +system sh/cfg.sh -n dnode3 -c clog -v 2 +system sh/cfg.sh -n dnode4 -c clog -v 2 system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1 system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1 diff --git a/tests/script/unique/db/replica_part.sim b/tests/script/unique/db/replica_part.sim index 7753b47b4e..f1b037a9d9 100644 --- a/tests/script/unique/db/replica_part.sim +++ b/tests/script/unique/db/replica_part.sim @@ -6,12 +6,12 @@ system sh/deploy.sh -n dnode1 -m 192.168.0.1 -i 192.168.0.1 system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2 system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3 -system sh/cfg.sh -n dnode1 -c commitLog -v 0 -system sh/cfg.sh -n dnode2 -c commitLog -v 0 -system sh/cfg.sh -n dnode3 -c commitLog -v 0 -system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1 -system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1 -system sh/cfg.sh -n dnode3 -c numOfMPeers -v 1 +system sh/cfg.sh -n dnode1 -c clog -v 2 +system sh/cfg.sh -n dnode2 -c clog -v 2 +system sh/cfg.sh -n dnode3 -c clog -v 2 +system sh/cfg.sh -n dnode1 -c numOfMPeers -v 2 +system sh/cfg.sh -n dnode2 -c numOfMPeers -v 2 +system sh/cfg.sh -n dnode3 -c numOfMPeers -v 2 system sh/cfg.sh -n dnode1 -c mgmtEqualVnodeNum -v 4 system sh/cfg.sh -n dnode2 -c mgmtEqualVnodeNum -v 4 system sh/cfg.sh -n dnode3 -c mgmtEqualVnodeNum -v 4 diff --git a/tests/script/unique/db/replica_reduce21.sim b/tests/script/unique/db/replica_reduce21.sim index e79aedbb24..085d07b714 100644 --- a/tests/script/unique/db/replica_reduce21.sim +++ b/tests/script/unique/db/replica_reduce21.sim @@ -6,9 +6,9 @@ system sh/deploy.sh -n dnode1 -m 192.168.0.1 -i 192.168.0.1 system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2 system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3 -system sh/cfg.sh -n dnode1 -c commitLog -v 1 -system sh/cfg.sh -n dnode2 -c commitLog -v 1 -system sh/cfg.sh -n dnode3 -c commitLog -v 1 +system sh/cfg.sh -n dnode1 -c clog -v 2 +system sh/cfg.sh -n dnode2 -c clog -v 2 +system sh/cfg.sh -n dnode3 -c clog -v 2 system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1 system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1 system sh/cfg.sh -n dnode3 -c numOfMPeers -v 1 diff --git a/tests/script/unique/db/replica_reduce31.sim b/tests/script/unique/db/replica_reduce31.sim index 92da979b97..1190d688aa 100644 --- a/tests/script/unique/db/replica_reduce31.sim +++ b/tests/script/unique/db/replica_reduce31.sim @@ -6,9 +6,9 @@ system sh/deploy.sh -n dnode1 -m 192.168.0.1 -i 192.168.0.1 system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2 system sh/deploy.sh -n 
dnode3 -m 192.168.0.1 -i 192.168.0.3 -system sh/cfg.sh -n dnode1 -c commitLog -v 1 -system sh/cfg.sh -n dnode2 -c commitLog -v 1 -system sh/cfg.sh -n dnode3 -c commitLog -v 1 +system sh/cfg.sh -n dnode1 -c clog -v 2 +system sh/cfg.sh -n dnode2 -c clog -v 2 +system sh/cfg.sh -n dnode3 -c clog -v 2 system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1 system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1 system sh/cfg.sh -n dnode3 -c numOfMPeers -v 1 diff --git a/tests/script/unique/db/replica_reduce32.sim b/tests/script/unique/db/replica_reduce32.sim index 86f99e72be..cdb5ada79b 100644 --- a/tests/script/unique/db/replica_reduce32.sim +++ b/tests/script/unique/db/replica_reduce32.sim @@ -6,9 +6,9 @@ system sh/deploy.sh -n dnode1 -m 192.168.0.1 -i 192.168.0.1 system sh/deploy.sh -n dnode2 -m 192.168.0.1 -i 192.168.0.2 system sh/deploy.sh -n dnode3 -m 192.168.0.1 -i 192.168.0.3 -system sh/cfg.sh -n dnode1 -c commitLog -v 1 -system sh/cfg.sh -n dnode2 -c commitLog -v 1 -system sh/cfg.sh -n dnode3 -c commitLog -v 1 +system sh/cfg.sh -n dnode1 -c clog -v 2 +system sh/cfg.sh -n dnode2 -c clog -v 2 +system sh/cfg.sh -n dnode3 -c clog -v 2 system sh/cfg.sh -n dnode1 -c numOfMPeers -v 1 system sh/cfg.sh -n dnode2 -c numOfMPeers -v 1 system sh/cfg.sh -n dnode3 -c numOfMPeers -v 1 From 177334e0b70adc07b5d2b4106ce5a5ba6fc6fdc7 Mon Sep 17 00:00:00 2001 From: slguan Date: Mon, 27 Apr 2020 12:08:19 +0800 Subject: [PATCH 02/15] add script --- tests/script/unique/db/replica_add12.sim | 41 ++++++++++++++++-------- 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/tests/script/unique/db/replica_add12.sim b/tests/script/unique/db/replica_add12.sim index 9f5adfeb06..969a2a2091 100644 --- a/tests/script/unique/db/replica_add12.sim +++ b/tests/script/unique/db/replica_add12.sim @@ -98,16 +98,9 @@ sql alter database d1 replica 2 sql alter database d2 replica 2 sql alter database d3 replica 2 sql alter database d4 replica 2 -sleep 2000 +sleep 10000 print ======== step3 -$x = 0 -show3: - $x = $x + 1 - sleep 2000 - if $x == 10 then - return -1 - endi sql show dnodes print dnode192.168.0.1 ==> openVnodes: $data3_1 @@ -115,18 +108,17 @@ print dnode192.168.0.2 ==> openVnodes: $data3_2 print dnode192.168.0.3 ==> openVnodes: $data3_3 if $data3_1 != 0 then - goto show3 + return -1 endi if $data3_2 != 4 then - goto show3 + return -1 endi if $data3_3 != 4 then - goto show3 + return -1 endi - print ======== step4 sql insert into d1.t1 values(now, 2) sql insert into d2.t2 values(now, 2) @@ -153,11 +145,34 @@ if $rows != 2 then return -1 endi -sleep 10000 +sleep 2000 print ========= step5 system sh/exec_up.sh -n dnode2 -s stop -x SIGINT sleep 5000 +sql select * from d1.t1 +if $rows != 2 then + return -1 +endi + +sql select * from d2.t2 +if $rows != 2 then + return -1 +endi + +sql select * from d3.t3 +if $rows != 2 then + return -1 +endi + +sql select * from d4.t4 +if $rows != 2 then + return -1 +endi + +print ===== insert data + + sql insert into d1.t1 values(now, 3) sql insert into d2.t2 values(now, 3) sql insert into d3.t3 values(now, 3) From a257c6780b34a0104fbaabc8f0fd4983950d368c Mon Sep 17 00:00:00 2001 From: slguan Date: Mon, 27 Apr 2020 13:29:38 +0800 Subject: [PATCH 03/15] add script --- tests/script/sh/deploy.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/script/sh/deploy.sh b/tests/script/sh/deploy.sh index e2618a78cb..63c65d0345 100755 --- a/tests/script/sh/deploy.sh +++ b/tests/script/sh/deploy.sh @@ -93,7 +93,7 @@ echo "privateIp $NODE_IP" >> $TAOS_CFG echo "dDebugFlag 199" >> $TAOS_CFG 
echo "mDebugFlag 199" >> $TAOS_CFG echo "sdbDebugFlag 199" >> $TAOS_CFG -echo "rpcDebugFlag 131" >> $TAOS_CFG +echo "rpcDebugFlag 135" >> $TAOS_CFG echo "tmrDebugFlag 131" >> $TAOS_CFG echo "cDebugFlag 135" >> $TAOS_CFG echo "httpDebugFlag 135" >> $TAOS_CFG From fd76b5f71d3992d834e663c0e8a19474ccf4eb75 Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Mon, 27 Apr 2020 14:49:18 +0800 Subject: [PATCH 04/15] port import_merge python testcase for 2.0 [TD-205] --- .travis.yml | 7 +- tests/pytest/fulltest.sh | 75 +++++++++++++++ tests/pytest/import_merge/__init__.py | 0 tests/pytest/import_merge/importBlock1H.py | 70 ++++++++++++++ tests/pytest/import_merge/importBlock1HO.py | 73 +++++++++++++++ tests/pytest/import_merge/importBlock1HPO.py | 75 +++++++++++++++ tests/pytest/import_merge/importBlock1S.py | 73 +++++++++++++++ tests/pytest/import_merge/importBlock1Sub.py | 73 +++++++++++++++ tests/pytest/import_merge/importBlock1T.py | 70 ++++++++++++++ tests/pytest/import_merge/importBlock1TO.py | 73 +++++++++++++++ tests/pytest/import_merge/importBlock1TPO.py | 75 +++++++++++++++ tests/pytest/import_merge/importBlock2H.py | 70 ++++++++++++++ tests/pytest/import_merge/importBlock2HO.py | 73 +++++++++++++++ tests/pytest/import_merge/importBlock2HPO.py | 75 +++++++++++++++ tests/pytest/import_merge/importBlock2S.py | 73 +++++++++++++++ tests/pytest/import_merge/importBlock2Sub.py | 73 +++++++++++++++ tests/pytest/import_merge/importBlock2T.py | 70 ++++++++++++++ tests/pytest/import_merge/importBlock2TO.py | 73 +++++++++++++++ tests/pytest/import_merge/importBlock2TPO.py | 75 +++++++++++++++ .../pytest/import_merge/importBlockbetween.py | 76 ++++++++++++++++ tests/pytest/import_merge/importCacheFileH.py | 86 ++++++++++++++++++ .../pytest/import_merge/importCacheFileHO.py | 89 ++++++++++++++++++ .../pytest/import_merge/importCacheFileHPO.py | 91 +++++++++++++++++++ tests/pytest/import_merge/importCacheFileS.py | 89 ++++++++++++++++++ .../pytest/import_merge/importCacheFileSub.py | 89 ++++++++++++++++++ tests/pytest/import_merge/importCacheFileT.py | 86 ++++++++++++++++++ .../pytest/import_merge/importCacheFileTO.py | 89 ++++++++++++++++++ .../pytest/import_merge/importCacheFileTPO.py | 91 +++++++++++++++++++ tests/pytest/import_merge/importDataH2.py | 91 +++++++++++++++++++ tests/pytest/import_merge/importDataHO.py | 80 ++++++++++++++++ tests/pytest/import_merge/importDataHO2.py | 80 ++++++++++++++++ tests/pytest/import_merge/importDataHPO.py | 82 +++++++++++++++++ tests/pytest/import_merge/importDataLastH.py | 76 ++++++++++++++++ tests/pytest/import_merge/importDataLastHO.py | 79 ++++++++++++++++ .../pytest/import_merge/importDataLastHPO.py | 81 +++++++++++++++++ tests/pytest/import_merge/importDataLastS.py | 79 ++++++++++++++++ .../pytest/import_merge/importDataLastSub.py | 79 ++++++++++++++++ tests/pytest/import_merge/importDataLastT.py | 72 +++++++++++++++ tests/pytest/import_merge/importDataLastTO.py | 75 +++++++++++++++ .../pytest/import_merge/importDataLastTPO.py | 77 ++++++++++++++++ tests/pytest/import_merge/importDataS.py | 75 +++++++++++++++ tests/pytest/import_merge/importDataSub.py | 80 ++++++++++++++++ tests/pytest/import_merge/importDataT.py | 72 +++++++++++++++ tests/pytest/import_merge/importDataTO.py | 75 +++++++++++++++ tests/pytest/import_merge/importDataTPO.py | 78 ++++++++++++++++ tests/pytest/import_merge/importHORestart.py | 66 ++++++++++++++ tests/pytest/import_merge/importHPORestart.py | 69 ++++++++++++++ tests/pytest/import_merge/importHRestart.py | 65 +++++++++++++ 
tests/pytest/import_merge/importHead.py | 68 ++++++++++++++ .../pytest/import_merge/importHeadOverlap.py | 63 +++++++++++++ .../import_merge/importHeadPartOverlap.py | 67 ++++++++++++++ tests/pytest/import_merge/importLastH.py | 72 +++++++++++++++ tests/pytest/import_merge/importLastHO.py | 75 +++++++++++++++ tests/pytest/import_merge/importLastHPO.py | 77 ++++++++++++++++ tests/pytest/import_merge/importLastS.py | 74 +++++++++++++++ tests/pytest/import_merge/importLastSub.py | 75 +++++++++++++++ tests/pytest/import_merge/importLastT.py | 76 ++++++++++++++++ tests/pytest/import_merge/importLastTO.py | 79 ++++++++++++++++ tests/pytest/import_merge/importLastTPO.py | 80 ++++++++++++++++ tests/pytest/import_merge/importSRestart.py | 73 +++++++++++++++ tests/pytest/import_merge/importSpan.py | 68 ++++++++++++++ tests/pytest/import_merge/importSubRestart.py | 73 +++++++++++++++ tests/pytest/import_merge/importTORestart.py | 73 +++++++++++++++ tests/pytest/import_merge/importTPORestart.py | 75 +++++++++++++++ tests/pytest/import_merge/importTRestart.py | 72 +++++++++++++++ tests/pytest/import_merge/importTail.py | 68 ++++++++++++++ .../pytest/import_merge/importTailOverlap.py | 70 ++++++++++++++ .../import_merge/importTailPartOverlap.py | 70 ++++++++++++++ tests/pytest/import_merge/importToCommit.py | 81 +++++++++++++++++ tests/pytest/simpletest.sh | 25 ----- tests/pytest/smoketest.sh | 55 +++++++++++ tests/test-all.sh | 7 +- 72 files changed, 5149 insertions(+), 30 deletions(-) create mode 100755 tests/pytest/fulltest.sh create mode 100644 tests/pytest/import_merge/__init__.py create mode 100644 tests/pytest/import_merge/importBlock1H.py create mode 100644 tests/pytest/import_merge/importBlock1HO.py create mode 100644 tests/pytest/import_merge/importBlock1HPO.py create mode 100644 tests/pytest/import_merge/importBlock1S.py create mode 100644 tests/pytest/import_merge/importBlock1Sub.py create mode 100644 tests/pytest/import_merge/importBlock1T.py create mode 100644 tests/pytest/import_merge/importBlock1TO.py create mode 100644 tests/pytest/import_merge/importBlock1TPO.py create mode 100644 tests/pytest/import_merge/importBlock2H.py create mode 100644 tests/pytest/import_merge/importBlock2HO.py create mode 100644 tests/pytest/import_merge/importBlock2HPO.py create mode 100644 tests/pytest/import_merge/importBlock2S.py create mode 100644 tests/pytest/import_merge/importBlock2Sub.py create mode 100644 tests/pytest/import_merge/importBlock2T.py create mode 100644 tests/pytest/import_merge/importBlock2TO.py create mode 100644 tests/pytest/import_merge/importBlock2TPO.py create mode 100644 tests/pytest/import_merge/importBlockbetween.py create mode 100644 tests/pytest/import_merge/importCacheFileH.py create mode 100644 tests/pytest/import_merge/importCacheFileHO.py create mode 100644 tests/pytest/import_merge/importCacheFileHPO.py create mode 100644 tests/pytest/import_merge/importCacheFileS.py create mode 100644 tests/pytest/import_merge/importCacheFileSub.py create mode 100644 tests/pytest/import_merge/importCacheFileT.py create mode 100644 tests/pytest/import_merge/importCacheFileTO.py create mode 100644 tests/pytest/import_merge/importCacheFileTPO.py create mode 100644 tests/pytest/import_merge/importDataH2.py create mode 100644 tests/pytest/import_merge/importDataHO.py create mode 100644 tests/pytest/import_merge/importDataHO2.py create mode 100644 tests/pytest/import_merge/importDataHPO.py create mode 100644 tests/pytest/import_merge/importDataLastH.py create mode 100644 
tests/pytest/import_merge/importDataLastHO.py create mode 100644 tests/pytest/import_merge/importDataLastHPO.py create mode 100644 tests/pytest/import_merge/importDataLastS.py create mode 100644 tests/pytest/import_merge/importDataLastSub.py create mode 100644 tests/pytest/import_merge/importDataLastT.py create mode 100644 tests/pytest/import_merge/importDataLastTO.py create mode 100644 tests/pytest/import_merge/importDataLastTPO.py create mode 100644 tests/pytest/import_merge/importDataS.py create mode 100644 tests/pytest/import_merge/importDataSub.py create mode 100644 tests/pytest/import_merge/importDataT.py create mode 100644 tests/pytest/import_merge/importDataTO.py create mode 100644 tests/pytest/import_merge/importDataTPO.py create mode 100644 tests/pytest/import_merge/importHORestart.py create mode 100644 tests/pytest/import_merge/importHPORestart.py create mode 100644 tests/pytest/import_merge/importHRestart.py create mode 100644 tests/pytest/import_merge/importHead.py create mode 100644 tests/pytest/import_merge/importHeadOverlap.py create mode 100644 tests/pytest/import_merge/importHeadPartOverlap.py create mode 100644 tests/pytest/import_merge/importLastH.py create mode 100644 tests/pytest/import_merge/importLastHO.py create mode 100644 tests/pytest/import_merge/importLastHPO.py create mode 100644 tests/pytest/import_merge/importLastS.py create mode 100644 tests/pytest/import_merge/importLastSub.py create mode 100644 tests/pytest/import_merge/importLastT.py create mode 100644 tests/pytest/import_merge/importLastTO.py create mode 100644 tests/pytest/import_merge/importLastTPO.py create mode 100644 tests/pytest/import_merge/importSRestart.py create mode 100644 tests/pytest/import_merge/importSpan.py create mode 100644 tests/pytest/import_merge/importSubRestart.py create mode 100644 tests/pytest/import_merge/importTORestart.py create mode 100644 tests/pytest/import_merge/importTPORestart.py create mode 100644 tests/pytest/import_merge/importTRestart.py create mode 100644 tests/pytest/import_merge/importTail.py create mode 100644 tests/pytest/import_merge/importTailOverlap.py create mode 100644 tests/pytest/import_merge/importTailPartOverlap.py create mode 100644 tests/pytest/import_merge/importToCommit.py delete mode 100755 tests/pytest/simpletest.sh create mode 100755 tests/pytest/smoketest.sh diff --git a/.travis.yml b/.travis.yml index 39fddc20c9..7df3a7d7fe 100644 --- a/.travis.yml +++ b/.travis.yml @@ -46,10 +46,10 @@ matrix: pip3 install --user ${TRAVIS_BUILD_DIR}/src/connector/python/linux/python3/ cd ${TRAVIS_BUILD_DIR}/tests - ./test-all.sh || travis_terminate $? + ./test-all.sh $TRAVIS_EVENT_TYPE || travis_terminate $? 
cd ${TRAVIS_BUILD_DIR}/tests/pytest - ./simpletest.sh -g 2>&1 | tee mem-error-out.txt + ./smoketest.sh -g 2>&1 | tee mem-error-out.txt sleep 1 # Color setting @@ -86,13 +86,12 @@ matrix: addons: coverity_scan: - # GitHub project metadata # ** specific to your project ** project: name: TDengine version: 2.x - description: taosdata/TDengine + description: TDengine # Where email notification of build analysis results will be sent notification_email: sdsang@taosdata.com diff --git a/tests/pytest/fulltest.sh b/tests/pytest/fulltest.sh new file mode 100755 index 0000000000..1b879b3cc6 --- /dev/null +++ b/tests/pytest/fulltest.sh @@ -0,0 +1,75 @@ +#!/bin/bash +python3 ./test.py $1 -f insert/basic.py +python3 ./test.py $1 -f insert/int.py +python3 ./test.py $1 -f insert/float.py +python3 ./test.py $1 -f insert/bigint.py +python3 ./test.py $1 -f insert/bool.py +python3 ./test.py $1 -f insert/double.py +python3 ./test.py $1 -f insert/smallint.py +python3 ./test.py $1 -f insert/tinyint.py +python3 ./test.py $1 -f import_merge/importBlock1HO.py +python3 ./test.py $1 -f import_merge/importBlock1HPO.py +python3 ./test.py $1 -f import_merge/importBlock1H.py +python3 ./test.py $1 -f import_merge/importBlock1S.py +python3 ./test.py $1 -f import_merge/importBlock1Sub.py +python3 ./test.py $1 -f import_merge/importBlock1TO.py +python3 ./test.py $1 -f import_merge/importBlock1TPO.py +python3 ./test.py $1 -f import_merge/importBlock1T.py +python3 ./test.py $1 -f import_merge/importBlock2HO.py +python3 ./test.py $1 -f import_merge/importBlock2HPO.py +python3 ./test.py $1 -f import_merge/importBlock2H.py +python3 ./test.py $1 -f import_merge/importBlock2S.py +python3 ./test.py $1 -f import_merge/importBlock2Sub.py +python3 ./test.py $1 -f import_merge/importBlock2TO.py +python3 ./test.py $1 -f import_merge/importBlock2TPO.py +python3 ./test.py $1 -f import_merge/importBlock2T.py +python3 ./test.py $1 -f import_merge/importBlockbetween.py +python3 ./test.py $1 -f import_merge/importCacheFileHO.py +python3 ./test.py $1 -f import_merge/importCacheFileHPO.py +python3 ./test.py $1 -f import_merge/importCacheFileH.py +python3 ./test.py $1 -f import_merge/importCacheFileS.py +python3 ./test.py $1 -f import_merge/importCacheFileSub.py +python3 ./test.py $1 -f import_merge/importCacheFileTO.py +python3 ./test.py $1 -f import_merge/importCacheFileTPO.py +python3 ./test.py $1 -f import_merge/importCacheFileT.py +python3 ./test.py $1 -f import_merge/importDataH2.py +python3 ./test.py $1 -f import_merge/importDataHO2.py +python3 ./test.py $1 -f import_merge/importDataHO.py +python3 ./test.py $1 -f import_merge/importDataHPO.py +python3 ./test.py $1 -f import_merge/importDataLastHO.py +python3 ./test.py $1 -f import_merge/importDataLastHPO.py +python3 ./test.py $1 -f import_merge/importDataLastH.py +python3 ./test.py $1 -f import_merge/importDataLastS.py +python3 ./test.py $1 -f import_merge/importDataLastSub.py +python3 ./test.py $1 -f import_merge/importDataLastTO.py +python3 ./test.py $1 -f import_merge/importDataLastTPO.py +python3 ./test.py $1 -f import_merge/importDataLastT.py +python3 ./test.py $1 -f import_merge/importDataS.py +python3 ./test.py $1 -f import_merge/importDataSub.py +python3 ./test.py $1 -f import_merge/importDataTO.py +python3 ./test.py $1 -f import_merge/importDataTPO.py +python3 ./test.py $1 -f import_merge/importDataT.py +python3 ./test.py $1 -f import_merge/importHeadOverlap.py +python3 ./test.py $1 -f import_merge/importHeadPartOverlap.py +python3 ./test.py $1 -f import_merge/importHead.py 
+python3 ./test.py $1 -f import_merge/importHORestart.py +python3 ./test.py $1 -f import_merge/importHPORestart.py +python3 ./test.py $1 -f import_merge/importHRestart.py +python3 ./test.py $1 -f import_merge/importLastHO.py +python3 ./test.py $1 -f import_merge/importLastHPO.py +python3 ./test.py $1 -f import_merge/importLastH.py +python3 ./test.py $1 -f import_merge/importLastS.py +python3 ./test.py $1 -f import_merge/importLastSub.py +python3 ./test.py $1 -f import_merge/importLastTO.py +python3 ./test.py $1 -f import_merge/importLastTPO.py +python3 ./test.py $1 -f import_merge/importLastT.py +python3 ./test.py $1 -f import_merge/importSpan.py +python3 ./test.py $1 -f import_merge/importSRestart.py +python3 ./test.py $1 -f import_merge/importSubRestart.py +python3 ./test.py $1 -f import_merge/importTailOverlap.py +python3 ./test.py $1 -f import_merge/importTailPartOverlap.py +python3 ./test.py $1 -f import_merge/importTail.py +python3 ./test.py $1 -f import_merge/importToCommit.py +python3 ./test.py $1 -f import_merge/importTORestart.py +python3 ./test.py $1 -f import_merge/importTPORestart.py +python3 ./test.py $1 -f import_merge/importTRestart.py diff --git a/tests/pytest/import_merge/__init__.py b/tests/pytest/import_merge/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/pytest/import_merge/importBlock1H.py b/tests/pytest/import_merge/importBlock1H.py new file mode 100644 index 0000000000..8ef3735826 --- /dev/null +++ b/tests/pytest/import_merge/importBlock1H.py @@ -0,0 +1,70 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. + # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,39): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(39) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) 
+tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1HO.py b/tests/pytest/import_merge/importBlock1HO.py new file mode 100644 index 0000000000..e88c13970e --- /dev/null +++ b/tests/pytest/import_merge/importBlock1HO.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. + # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,39): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(43) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1HPO.py b/tests/pytest/import_merge/importBlock1HPO.py new file mode 100644 index 0000000000..c0cbad5462 --- /dev/null +++ b/tests/pytest/import_merge/importBlock1HPO.py @@ -0,0 +1,75 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + for rid in range(15,43): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(47) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1S.py b/tests/pytest/import_merge/importBlock1S.py new file mode 100644 index 0000000000..849c1ece8e --- /dev/null +++ b/tests/pytest/import_merge/importBlock1S.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,39): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 50 data covering existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,51): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(50) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1Sub.py b/tests/pytest/import_merge/importBlock1Sub.py new file mode 100644 index 0000000000..ef75c49d09 --- /dev/null +++ b/tests/pytest/import_merge/importBlock1Sub.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,39): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1T.py b/tests/pytest/import_merge/importBlock1T.py new file mode 100644 index 0000000000..ac1fb5a8d9 --- /dev/null +++ b/tests/pytest/import_merge/importBlock1T.py @@ -0,0 +1,70 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,39): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime = self.startTime + 38 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+1, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(39) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1TO.py b/tests/pytest/import_merge/importBlock1TO.py new file mode 100644 index 0000000000..52580de81c --- /dev/null +++ b/tests/pytest/import_merge/importBlock1TO.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,39): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 10 data later with overlap") + startTime = self.startTime + 30 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(40) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1TPO.py b/tests/pytest/import_merge/importBlock1TPO.py new file mode 100644 index 0000000000..f1b6186212 --- /dev/null +++ b/tests/pytest/import_merge/importBlock1TPO.py @@ -0,0 +1,75 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,31): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + for rid in range(35,43): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 30 data later with partly overlap") + startTime = self.startTime + 25 + sqlcmd = ['import into tb1 values'] + for rid in range(1,31): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(55) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2H.py b/tests/pytest/import_merge/importBlock2H.py new file mode 100644 index 0000000000..24a2f1fd08 --- /dev/null +++ b/tests/pytest/import_merge/importBlock2H.py @@ -0,0 +1,70 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,77): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(77) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2HO.py b/tests/pytest/import_merge/importBlock2HO.py new file mode 100644 index 0000000000..37ab498ae8 --- /dev/null +++ b/tests/pytest/import_merge/importBlock2HO.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,77): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(81) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2HPO.py b/tests/pytest/import_merge/importBlock2HPO.py new file mode 100644 index 0000000000..6243d2483f --- /dev/null +++ b/tests/pytest/import_merge/importBlock2HPO.py @@ -0,0 +1,75 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + for rid in range(15,81): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(85) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2S.py b/tests/pytest/import_merge/importBlock2S.py new file mode 100644 index 0000000000..4aa1dfd9dd --- /dev/null +++ b/tests/pytest/import_merge/importBlock2S.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,77): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 90 data covering existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,91): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(90) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2Sub.py b/tests/pytest/import_merge/importBlock2Sub.py new file mode 100644 index 0000000000..fe95b2d34d --- /dev/null +++ b/tests/pytest/import_merge/importBlock2Sub.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,77): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2T.py b/tests/pytest/import_merge/importBlock2T.py new file mode 100644 index 0000000000..4d5a4c942d --- /dev/null +++ b/tests/pytest/import_merge/importBlock2T.py @@ -0,0 +1,70 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,77): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime = self.startTime + 76 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+1, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(77) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2TO.py b/tests/pytest/import_merge/importBlock2TO.py new file mode 100644 index 0000000000..1bfea0a393 --- /dev/null +++ b/tests/pytest/import_merge/importBlock2TO.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,77): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 10 data later with overlap") + startTime = self.startTime + 70 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(80) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2TPO.py b/tests/pytest/import_merge/importBlock2TPO.py new file mode 100644 index 0000000000..b97b4b90b0 --- /dev/null +++ b/tests/pytest/import_merge/importBlock2TPO.py @@ -0,0 +1,75 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,61): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + for rid in range(65,81): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 30 data later with partly overlap") + startTime = self.startTime + 55 + sqlcmd = ['import into tb1 values'] + for rid in range(1,31): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(85) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlockbetween.py b/tests/pytest/import_merge/importBlockbetween.py new file mode 100644 index 0000000000..0dd9ed2bc2 --- /dev/null +++ b/tests/pytest/import_merge/importBlockbetween.py @@ -0,0 +1,76 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data with gap between 2 blocks") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,39): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + for rid in range(40,78): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import data into the gap between 2 blocks") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(39,40): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(77) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileH.py b/tests/pytest/import_merge/importCacheFileH.py new file mode 100644 index 0000000000..66155d7b5f --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileH.py @@ -0,0 +1,86 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
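
Each of these cases assembles one multi-row import by accumulating value tuples in a list and joining them, as in the step2 blocks above. A small sketch of that pattern as a standalone helper, assuming the same 'import into ... values' syntax; build_import_sql is an illustrative name only, not an existing utility in these tests.

def build_import_sql(table, start_ts, row_ids):
    # Mirrors the sqlcmd pattern used throughout these cases: one statement, many rows.
    parts = ['import into %s values' % table]
    for rid in row_ids:
        parts.append('(%ld, %d)' % (start_ts + rid, rid))
    return ' '.join(parts)

# For example, build_import_sql('tb1', 1520000010000, range(1, 39)) reproduces the
# first batch written in step2 of importBlockbetween.py above.
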
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(21) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileHO.py b/tests/pytest/import_merge/importCacheFileHO.py new file mode 100644 index 0000000000..e53722f853 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileHO.py @@ -0,0 +1,89 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(25) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileHPO.py b/tests/pytest/import_merge/importCacheFileHPO.py new file mode 100644 index 0000000000..5e869534ba --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileHPO.py @@ -0,0 +1,91 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 9 sequential data with gap") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + if (rid == 5): continue + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(9) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 9 data again with gap") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + if (rid == 5): continue + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(18) + + tdLog.info("================= step7") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 3 + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(23) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileS.py b/tests/pytest/import_merge/importCacheFileS.py new file mode 100644 index 0000000000..151ce8d9ab --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileS.py @@ -0,0 +1,89 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 30 data covering existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,31): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(30) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileSub.py b/tests/pytest/import_merge/importCacheFileSub.py new file mode 100644 index 0000000000..6237caeec4 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileSub.py @@ -0,0 +1,89 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileT.py b/tests/pytest/import_merge/importCacheFileT.py new file mode 100644 index 0000000000..70d6fa09ff --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileT.py @@ -0,0 +1,86 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 1 data later") + startTime = self.startTime + 20 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+1, rid)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(21) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileTO.py b/tests/pytest/import_merge/importCacheFileTO.py new file mode 100644 index 0000000000..16ec29bb02 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileTO.py @@ -0,0 +1,89 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 10 data later with overlap") + startTime = self.startTime + 15 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(25) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileTPO.py b/tests/pytest/import_merge/importCacheFileTPO.py new file mode 100644 index 0000000000..8b61e1ca72 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileTPO.py @@ -0,0 +1,91 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 9 sequential data with gap") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + if (rid == 7): continue + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(9) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 9 data again with gap") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + if (rid == 7): continue + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(18) + + tdLog.info("================= step7") + tdLog.info("import 20 data later with partly overlap") + startTime = self.startTime + 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(25) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataH2.py b/tests/pytest/import_merge/importDataH2.py new file mode 100644 index 0000000000..0df7d67d88 --- /dev/null +++ b/tests/pytest/import_merge/importDataH2.py @@ -0,0 +1,91 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + + tdLog.info("================= step2") + tdLog.info("import %d sequential data" %(self.rows/2)) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,self.rows/2+1): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(self.rows/2) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + + + tdLog.info("================= step7") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rows/2 + 1) + + tdLog.info("================= step8") + tdLog.info("import 10 data in batch before") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime - rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step9") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rows/2 + 11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataHO.py b/tests/pytest/import_merge/importDataHO.py new file mode 100644 index 0000000000..84dcb82a57 --- /dev/null +++ b/tests/pytest/import_merge/importDataHO.py @@ -0,0 +1,80 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
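
importDataH2.py above passes self.rows/2 to range() and checkRows(), which assumes Python 2 semantics where / on two integers is integer division; under Python 3 the same expressions would yield a float and range() would raise a TypeError. A brief sketch of the Python 3 form, shown only to clarify what those expressions intend:

rows = 200
half = rows // 2                  # floor division keeps this an int; rows / 2 is 100.0 in Python 3
ids = list(range(1, half + 1))    # range() rejects floats, so // (or int()) is required
assert len(ids) == 100            # the 100 rows imported in step2 of importDataH2.py
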
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + self.rowsPerTable = 20 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + + tdLog.info("================= step2") + tdLog.info("import %d sequential data" %self.rowsPerTable) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,self.rowsPerTable+1): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, self.rowsPerTable) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 6 data before with overlap") + startTime = self.startTime - 3 + sqlcmd = ['import into tb1 values'] + for rid in range(1,7): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rowsPerTable + 3) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataHO2.py b/tests/pytest/import_merge/importDataHO2.py new file mode 100644 index 0000000000..8077849cc1 --- /dev/null +++ b/tests/pytest/import_merge/importDataHO2.py @@ -0,0 +1,80 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + self.rowsPerTable = 100 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + + tdLog.info("================= step2") + tdLog.info("import %d sequential data" %self.rowsPerTable) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,self.rowsPerTable+1): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, self.rowsPerTable) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 6 data before with overlap") + startTime = self.startTime - 3 + sqlcmd = ['import into tb1 values'] + for rid in range(6, 0, -1): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rowsPerTable+3) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataHPO.py b/tests/pytest/import_merge/importDataHPO.py new file mode 100644 index 0000000000..a73ae9c103 --- /dev/null +++ b/tests/pytest/import_merge/importDataHPO.py @@ -0,0 +1,82 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + self.rowsPerTable = 20 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + + tdLog.info("================= step2") + tdLog.info("import %d sequential data" %self.rowsPerTable) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,10): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + for rid in range(14,self.rowsPerTable+5): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, self.rowsPerTable) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 4 + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(self.rowsPerTable+8) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastH.py b/tests/pytest/import_merge/importDataLastH.py new file mode 100644 index 0000000000..c867703e13 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastH.py @@ -0,0 +1,76 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,206): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(206) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastHO.py b/tests/pytest/import_merge/importDataLastHO.py new file mode 100644 index 0000000000..68e858f781 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastHO.py @@ -0,0 +1,79 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,206): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 5 data before with overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1,6): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(207) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastHPO.py b/tests/pytest/import_merge/importDataLastHPO.py new file mode 100644 index 0000000000..29b71a7a8b --- /dev/null +++ b/tests/pytest/import_merge/importDataLastHPO.py @@ -0,0 +1,81 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + for rid in range(14,209): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(210) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastS.py b/tests/pytest/import_merge/importDataLastS.py new file mode 100644 index 0000000000..cb61f5d088 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastS.py @@ -0,0 +1,79 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,206): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 250 data covering the existing data") + startTime = self.startTime - 15 + sqlcmd = ['import into tb1 values'] + for rid in range(1,251): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(250) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastSub.py b/tests/pytest/import_merge/importDataLastSub.py new file mode 100644 index 0000000000..2b30c7c7c8 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastSub.py @@ -0,0 +1,79 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,206): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1,11): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastT.py b/tests/pytest/import_merge/importDataLastT.py new file mode 100644 index 0000000000..fecf64e495 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastT.py @@ -0,0 +1,72 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,206): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data later") + startTime = self.startTime + 205 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(206) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastTO.py b/tests/pytest/import_merge/importDataLastTO.py new file mode 100644 index 0000000000..700c2b8f04 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastTO.py @@ -0,0 +1,75 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,206): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 5 data later with overlap") + startTime = self.startTime + 203 + sqlcmd = ['import into tb1 values'] + for rid in range(1,6): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(208) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastTPO.py b/tests/pytest/import_merge/importDataLastTPO.py new file mode 100644 index 0000000000..2ae43378e8 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastTPO.py @@ -0,0 +1,77 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,196): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + for rid in range(200,210): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data later with partly overlap") + startTime = self.startTime + 192 + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(212) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataS.py b/tests/pytest/import_merge/importDataS.py new file mode 100644 index 0000000000..0a1849f5f7 --- /dev/null +++ b/tests/pytest/import_merge/importDataS.py @@ -0,0 +1,75 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 30 data covering the existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1,31): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(30) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataSub.py b/tests/pytest/import_merge/importDataSub.py new file mode 100644 index 0000000000..cd7b37b921 --- /dev/null +++ b/tests/pytest/import_merge/importDataSub.py @@ -0,0 +1,80 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted,
+ # disclosed or used in any form or by any means other than as
+ # expressly provided by the written permission from Jianhui Tao
+ #
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import taos
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+
+class TDTestCase:
+    def init(self, conn):
+        tdLog.debug("start to execute %s" % __file__)
+        tdSql.init(conn.cursor())
+
+    def run(self):
+        self.ntables = 1
+        self.startTime = 1520000010000
+        self.rows = 200
+
+        tdDnodes.stop(1)
+        tdDnodes.deploy(1)
+        tdDnodes.start(1)
+
+        tdSql.execute('reset query cache')
+        tdSql.execute('drop database if exists db')
+        tdSql.execute('create database db rows %d' %self.rows)
+        tdSql.execute('use db')
+
+        tdLog.info("================= step1")
+        tdLog.info("create 1 table")
+        tdSql.execute('create table tb1 (ts timestamp, speed int)')
+        tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows)
+
+        tdLog.info("================= step2")
+        tdLog.info("import %d sequential data" %(self.rows//2))
+        startTime = self.startTime
+        sqlcmd = ['import into tb1 values']
+        for rid in range(1,self.rows//2+1):
+            sqlcmd.append('(%ld, %d)' %(startTime+rid, rid))
+        tdSql.execute(" ".join(sqlcmd))
+
+        tdLog.info("================= step3")
+        tdSql.query('select * from tb1')
+        tdSql.checkRows(self.rows//2)
+
+        tdLog.info("================= step4")
+        tdDnodes.stop(1)
+        tdLog.sleep(5)
+        tdDnodes.start(1)
+
+        tdLog.info("================= step5")
+        tdLog.info("import 10 data totally repetitive")
+        startTime = self.startTime + 10
+        sqlcmd = ['import into tb1 values']
+        for rid in range(1,11):
+            sqlcmd.append('(%ld, %d)' %(startTime+rid, rid))
+        tdSql.execute(" ".join(sqlcmd))
+
+        tdLog.info("================= step6")
+        tdSql.execute('reset query cache')
+        tdSql.query('select * from tb1 order by ts desc')
+        tdSql.checkRows(self.rows//2)
+
+    def stop(self):
+        tdSql.close()
+        tdLog.success("%s successfully executed" % __file__)
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
diff --git a/tests/pytest/import_merge/importDataT.py b/tests/pytest/import_merge/importDataT.py
new file mode 100644
index 0000000000..8054d0a13c
--- /dev/null
+++ b/tests/pytest/import_merge/importDataT.py
@@ -0,0 +1,72 @@
+###################################################################
+ # Copyright (c) 2016 by TAOS Technologies, Inc.
+ # All rights reserved.
+ #
+ # This file is proprietary and confidential to TAOS Technologies.
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data later") + startTime = self.startTime + 20 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(21) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataTO.py b/tests/pytest/import_merge/importDataTO.py new file mode 100644 index 0000000000..e808c7382d --- /dev/null +++ b/tests/pytest/import_merge/importDataTO.py @@ -0,0 +1,75 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 6 data later with overlap") + startTime = self.startTime + 18 + sqlcmd = ['import into tb1 values'] + for rid in range(1,7): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(24) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataTPO.py b/tests/pytest/import_merge/importDataTPO.py new file mode 100644 index 0000000000..c9d454103d --- /dev/null +++ b/tests/pytest/import_merge/importDataTPO.py @@ -0,0 +1,78 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,18): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + for rid in range(22,25): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + tdLog.sleep(5) + + tdLog.info("================= step5") + tdLog.info("import 20 data later with partly overlap") + startTime = self.startTime + 15 + sqlcmd = ['import into tb1 values'] + for rid in range(1,21): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(35) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHORestart.py b/tests/pytest/import_merge/importHORestart.py new file mode 100644 index 0000000000..0d7bfab30a --- /dev/null +++ b/tests/pytest/import_merge/importHORestart.py @@ -0,0 +1,66 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted,
+ # disclosed or used in any form or by any means other than as
+ # expressly provided by the written permission from Jianhui Tao
+ #
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import taos
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+
+class TDTestCase:
+    def init(self, conn):
+        tdLog.debug("start to execute %s" % __file__)
+        tdSql.init(conn.cursor())
+
+    def run(self):
+        self.ntables = 1
+        self.startTime = 1520000010000
+
+        tdSql.prepare()
+
+        tdLog.info("================= step1")
+        tdLog.info("create 1 table")
+        tdSql.execute('create table tb1 (ts timestamp, i int)')
+
+        tdLog.info("================= step2")
+        tdLog.info("import 10 sequential data")
+        startTime = self.startTime
+        for rid in range(1,11):
+            tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid))
+
+        tdLog.info("================= step3")
+        tdSql.query('select * from tb1')
+        tdSql.checkRows(10)
+
+        tdLog.info("================= step4")
+        tdLog.info("import 10 data before with overlap")
+        startTime = self.startTime - 2
+        for rid in range(1,11):
+            tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid))
+
+        tdLog.info("================= step5")
+        tdDnodes.forcestop(1)
+        tdDnodes.start(1)
+        tdLog.sleep(10)
+
+        tdLog.info("================= step6")
+        tdSql.query('select * from tb1')
+        tdSql.checkRows(12)
+
+    def stop(self):
+        tdSql.close()
+        tdLog.success("%s successfully executed" % __file__)
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
diff --git a/tests/pytest/import_merge/importHPORestart.py b/tests/pytest/import_merge/importHPORestart.py
new file mode 100644
index 0000000000..3be6e74a14
--- /dev/null
+++ b/tests/pytest/import_merge/importHPORestart.py
@@ -0,0 +1,69 @@
+###################################################################
+ # Copyright (c) 2016 by TAOS Technologies, Inc.
+ # All rights reserved.
+ #
+ # This file is proprietary and confidential to TAOS Technologies.
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdSql.prepare() + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1,4): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + for rid in range(6,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data before with partly overlap") + startTime = self.startTime - 2 + for rid in range(1, 9): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHRestart.py b/tests/pytest/import_merge/importHRestart.py new file mode 100644 index 0000000000..3b885f7d45 --- /dev/null +++ b/tests/pytest/import_merge/importHRestart.py @@ -0,0 +1,65 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdSql.prepare() + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHead.py b/tests/pytest/import_merge/importHead.py new file mode 100644 index 0000000000..6b34e2247f --- /dev/null +++ b/tests/pytest/import_merge/importHead.py @@ -0,0 +1,68 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHeadOverlap.py b/tests/pytest/import_merge/importHeadOverlap.py new file mode 100644 index 0000000000..ed17615957 --- /dev/null +++ b/tests/pytest/import_merge/importHeadOverlap.py @@ -0,0 +1,63 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted,
+ # disclosed or used in any form or by any means other than as
+ # expressly provided by the written permission from Jianhui Tao
+ #
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import taos
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+
+class TDTestCase:
+    def init(self, conn):
+        tdLog.debug("start to execute %s" % __file__)
+        tdSql.init(conn.cursor())
+
+    def run(self):
+        self.ntables = 1
+        self.startTime = 1520000010000
+
+        tdSql.prepare()
+
+        tdLog.info("================= step1")
+        tdLog.info("create 1 table")
+        tdSql.execute('create table tb1 (ts timestamp, speed int)')
+
+        tdLog.info("================= step2")
+        tdLog.info("import 10 sequential data")
+        startTime = self.startTime
+        for rid in range(1,11):
+            tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid))
+            startTime += 1
+
+        tdLog.info("================= step3")
+        tdSql.query('select * from tb1')
+        tdSql.checkRows(10)
+
+        tdLog.info("================= step4")
+        tdLog.info("import 10 data before with overlap")
+        startTime = self.startTime - 2
+        for rid in range(1,11):
+            tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid))
+            startTime += 1
+
+        tdLog.info("================= step5")
+        tdSql.query('select * from tb1')
+        tdSql.checkRows(12)
+
+    def stop(self):
+        tdSql.close()
+        tdLog.success("%s successfully executed" % __file__)
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
diff --git a/tests/pytest/import_merge/importHeadPartOverlap.py b/tests/pytest/import_merge/importHeadPartOverlap.py
new file mode 100644
index 0000000000..a002edcd5a
--- /dev/null
+++ b/tests/pytest/import_merge/importHeadPartOverlap.py
@@ -0,0 +1,67 @@
+###################################################################
+ # Copyright (c) 2016 by TAOS Technologies, Inc.
+ # All rights reserved.
+ #
+ # This file is proprietary and confidential to TAOS Technologies.
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdSql.prepare() + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1,4): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + startTime += 1 + startTime += 2 + for rid in range(6,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data before with partly overlap") + startTime = self.startTime - 2 + for rid in range(1, 9): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + startTime += 1 + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastH.py b/tests/pytest/import_merge/importLastH.py new file mode 100644 index 0000000000..d5096845c7 --- /dev/null +++ b/tests/pytest/import_merge/importLastH.py @@ -0,0 +1,72 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,6): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastHO.py b/tests/pytest/import_merge/importLastHO.py new file mode 100644 index 0000000000..13bfcaea70 --- /dev/null +++ b/tests/pytest/import_merge/importLastHO.py @@ -0,0 +1,75 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,6): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 4 data before with overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1,5): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(7) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastHPO.py b/tests/pytest/import_merge/importLastHPO.py new file mode 100644 index 0000000000..73a0a1d935 --- /dev/null +++ b/tests/pytest/import_merge/importLastHPO.py @@ -0,0 +1,77 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 6 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,4): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + for rid in range(6,9): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 8 data before with partly overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1,9): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastS.py b/tests/pytest/import_merge/importLastS.py new file mode 100644 index 0000000000..16ce3b464c --- /dev/null +++ b/tests/pytest/import_merge/importLastS.py @@ -0,0 +1,74 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted,
+ # disclosed or used in any form or by any means other than as
+ # expressly provided by the written permission from Jianhui Tao
+ #
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import taos
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+
+class TDTestCase:
+    def init(self, conn):
+        tdLog.debug("start to execute %s" % __file__)
+        tdSql.init(conn.cursor())
+
+    def run(self):
+        self.ntables = 1
+        self.startTime = 1520000010000
+        self.rows = 200
+
+        tdSql.execute('reset query cache')
+        tdSql.execute('drop database if exists db')
+        tdSql.execute('create database db rows %d' %self.rows)
+        tdSql.execute('use db')
+
+        tdLog.info("================= step1")
+        tdLog.info("create 1 table")
+        tdSql.execute('create table tb1 (ts timestamp, speed int)')
+        tdLog.info("less than 10 rows will go to last file")
+
+        tdLog.info("================= step2")
+        tdLog.info("import 5 sequential data")
+        startTime = self.startTime
+        sqlcmd = ['import into tb1 values']
+        for rid in range(1,6):
+            sqlcmd.append('(%ld, %d)' %(startTime+rid, rid))
+        tdSql.execute(" ".join(sqlcmd))
+
+        tdLog.info("================= step3")
+        tdSql.query('select * from tb1')
+        tdSql.checkRows(5)
+
+        tdLog.info("================= step4")
+        tdDnodes.stop(1)
+        tdLog.sleep(5)
+        tdDnodes.start(1)
+
+        tdLog.info("================= step5")
+        tdLog.info("import 20 data covering existing data")
+        startTime = self.startTime - 10
+        sqlcmd = ['import into tb1 values']
+        for rid in range(1,21):
+            sqlcmd.append('(%ld, %d)' %(startTime+rid, rid))
+        tdSql.execute(" ".join(sqlcmd))
+
+        tdLog.info("================= step6")
+        tdSql.query('select * from tb1')
+        tdSql.checkRows(20)
+
+    def stop(self):
+        tdSql.close()
+        tdLog.success("%s successfully executed" % __file__)
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
diff --git a/tests/pytest/import_merge/importLastSub.py b/tests/pytest/import_merge/importLastSub.py
new file mode 100644
index 0000000000..68d1a57f97
--- /dev/null
+++ b/tests/pytest/import_merge/importLastSub.py
@@ -0,0 +1,75 @@
+###################################################################
+ # Copyright (c) 2016 by TAOS Technologies, Inc.
+ # All rights reserved.
+ #
+ # This file is proprietary and confidential to TAOS Technologies.
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,6): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 3 data totally repetitive") + startTime = self.startTime + 1 + sqlcmd = ['import into tb1 values'] + for rid in range(1,4): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastT.py b/tests/pytest/import_merge/importLastT.py new file mode 100644 index 0000000000..4c33b4dd01 --- /dev/null +++ b/tests/pytest/import_merge/importLastT.py @@ -0,0 +1,76 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,6): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data later") + startTime = self.startTime + 5 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastTO.py b/tests/pytest/import_merge/importLastTO.py new file mode 100644 index 0000000000..e89fa342ae --- /dev/null +++ b/tests/pytest/import_merge/importLastTO.py @@ -0,0 +1,79 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' %self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,6): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 4 data later with overlap") + startTime = self.startTime + 3 + sqlcmd = ['import into tb1 values'] + for rid in range(1,5): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(7) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastTPO.py b/tests/pytest/import_merge/importLastTPO.py new file mode 100644 index 0000000000..2b673bf152 --- /dev/null +++ b/tests/pytest/import_merge/importLastTPO.py @@ -0,0 +1,80 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted,
+ # disclosed or used in any form or by any means other than as
+ # expressly provided by the written permission from Jianhui Tao
+ #
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import taos
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+
+class TDTestCase:
+    def init(self, conn):
+        tdLog.debug("start to execute %s" % __file__)
+        tdSql.init(conn.cursor())
+
+    def run(self):
+        self.ntables = 1
+        self.startTime = 1520000010000
+        self.rows = 200
+
+        tdDnodes.stop(1)
+        tdDnodes.deploy(1)
+        tdDnodes.start(1)
+
+        tdSql.execute('reset query cache')
+        tdSql.execute('drop database if exists db')
+        tdSql.execute('create database db rows %d' %self.rows)
+        tdSql.execute('use db')
+
+        tdLog.info("================= step1")
+        tdLog.info("create 1 table")
+        tdSql.execute('create table tb1 (ts timestamp, speed int)')
+        tdLog.info("less than 10 rows will go to last file")
+
+        tdLog.info("================= step2")
+        tdLog.info("import 6 sequential data")
+        startTime = self.startTime
+        sqlcmd = ['import into tb1 values']
+        for rid in range(1,4):
+            sqlcmd.append('(%ld, %d)' %(startTime+rid, rid))
+        for rid in range(6,9):
+            sqlcmd.append('(%ld, %d)' %(startTime+rid, rid))
+        tdSql.execute(" ".join(sqlcmd))
+
+        tdLog.info("================= step3")
+        tdSql.query('select * from tb1')
+        tdSql.checkRows(6)
+
+        tdLog.info("================= step4")
+        tdDnodes.stop(1)
+        tdLog.sleep(5)
+        tdDnodes.start(1)
+
+        tdLog.info("================= step5")
+        tdLog.info("import 8 data later with partly overlap")
+        startTime = self.startTime + 2
+        sqlcmd = ['import into tb1 values']
+        for rid in range(1,9):
+            sqlcmd.append('(%ld, %d)' %(startTime+rid, rid))
+        tdSql.execute(" ".join(sqlcmd))
+
+        tdLog.info("================= step6")
+        tdSql.query('select * from tb1')
+        tdSql.checkRows(10)
+
+    def stop(self):
+        tdSql.close()
+        tdLog.success("%s successfully executed" % __file__)
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
diff --git a/tests/pytest/import_merge/importSRestart.py b/tests/pytest/import_merge/importSRestart.py
new file mode 100644
index 0000000000..5636ab5015
--- /dev/null
+++ b/tests/pytest/import_merge/importSRestart.py
@@ -0,0 +1,73 @@
+###################################################################
+ # Copyright (c) 2016 by TAOS Technologies, Inc.
+ # All rights reserved.
+ #
+ # This file is proprietary and confidential to TAOS Technologies.
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 20 data cover existing data") + startTime = self.startTime - 5 + for rid in range(1,21): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importSpan.py b/tests/pytest/import_merge/importSpan.py new file mode 100644 index 0000000000..e57062a7b5 --- /dev/null +++ b/tests/pytest/import_merge/importSpan.py @@ -0,0 +1,68 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 20 data cover existing data") + startTime = self.startTime - 5 + for rid in range(1,21): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importSubRestart.py b/tests/pytest/import_merge/importSubRestart.py new file mode 100644 index 0000000000..aae79d787b --- /dev/null +++ b/tests/pytest/import_merge/importSubRestart.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + for rid in range(1,21): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 5 + for rid in range(1,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTORestart.py b/tests/pytest/import_merge/importTORestart.py new file mode 100644 index 0000000000..6a0621568d --- /dev/null +++ b/tests/pytest/import_merge/importTORestart.py @@ -0,0 +1,73 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 6 data after with overlap") + startTime = self.startTime + 8 + for rid in range(1,7): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(14) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTPORestart.py b/tests/pytest/import_merge/importTPORestart.py new file mode 100644 index 0000000000..0ccb481890 --- /dev/null +++ b/tests/pytest/import_merge/importTPORestart.py @@ -0,0 +1,75 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1,6): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + for rid in range(8,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data after with partly overlap") + startTime = self.startTime + 3 + for rid in range(1,9): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTRestart.py b/tests/pytest/import_merge/importTRestart.py new file mode 100644 index 0000000000..a80a3df0c7 --- /dev/null +++ b/tests/pytest/import_merge/importTRestart.py @@ -0,0 +1,72 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime = self.startTime + 11 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTail.py b/tests/pytest/import_merge/importTail.py new file mode 100644 index 0000000000..4cfa248ecb --- /dev/null +++ b/tests/pytest/import_merge/importTail.py @@ -0,0 +1,68 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime += 1 + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTailOverlap.py b/tests/pytest/import_merge/importTailOverlap.py new file mode 100644 index 0000000000..6cbf9d2e14 --- /dev/null +++ b/tests/pytest/import_merge/importTailOverlap.py @@ -0,0 +1,70 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 6 data after with overlap") + startTime = self.startTime + 8 + for rid in range(1,7): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + startTime += 1 + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(14) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTailPartOverlap.py b/tests/pytest/import_merge/importTailPartOverlap.py new file mode 100644 index 0000000000..473b0314d8 --- /dev/null +++ b/tests/pytest/import_merge/importTailPartOverlap.py @@ -0,0 +1,70 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1,6): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + for rid in range(8,11): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data after with partly overlap") + startTime = self.startTime + 3 + for rid in range(1,9): + tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importToCommit.py b/tests/pytest/import_merge/importToCommit.py new file mode 100644 index 0000000000..9e14e3cd17 --- /dev/null +++ b/tests/pytest/import_merge/importToCommit.py @@ -0,0 +1,81 @@ +################################################################### + # Copyright (c) 2016 by TAOS Technologies, Inc. + # All rights reserved. + # + # This file is proprietary and confidential to TAOS Technologies. 
+ # No part of this file may be reproduced, stored, transmitted, + # disclosed or used in any form or by any means other than as + # expressly provided by the written permission from Jianhui Tao + # +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512 tables 10') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + tdLog.info("one block can import 38 records and totally there are 40 blocks") + + tdLog.info("================= step2") + tdLog.info('insert data until the first commit') + dnodesDir = tdDnodes.getDnodesRootDir() + dataDir = dnodesDir + '/dnode1/data/data' + startTime = self.startTime + rid0 = 1 + while (True): + sqlcmd = 'insert into tb1 values(%ld, %d)' %(startTime+rid0*2, rid0) + tdSql.execute(sqlcmd) + rid0 += 1 + vnodes = os.listdir(dataDir) + if (len(vnodes) > 0): + tdLog.info("data is committed, stop inserting") + break + + tdLog.info("================= step5") + tdLog.info("import 1 data before ") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(3,4): + sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(rid0-1+1) + + tdLog.info("================= step7") + tdSql.execute('reset query cache') + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, rid0-1+1) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/simpletest.sh b/tests/pytest/simpletest.sh deleted file mode 100755 index 73b25d2056..0000000000 --- a/tests/pytest/simpletest.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -python3 ./test.py $1 -f insert/basic.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/int.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/float.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/bigint.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/bool.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/double.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/smallint.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/tinyint.py -python3 ./test.py -s $1 -sleep 1 diff --git a/tests/pytest/smoketest.sh b/tests/pytest/smoketest.sh new file mode 100755 index 0000000000..71d19df5c0 --- /dev/null +++ b/tests/pytest/smoketest.sh @@ -0,0 +1,55 @@ +#!/bin/bash +python3 ./test.py $1 -f insert/basic.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/int.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/float.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/bigint.py +python3 ./test.py -s $1 +sleep 
1 +python3 ./test.py $1 -f insert/bool.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/double.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/smallint.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/tinyint.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importDataLastTO.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importDataLastT.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importDataTO.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importDataT.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHeadOverlap.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHeadPartOverlap.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHORestart.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHPORestart.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHRestart.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importLastSub.py +python3 ./test.py -s $1 +sleep 1 diff --git a/tests/test-all.sh b/tests/test-all.sh index 6943dd47a7..907ef4bedd 100755 --- a/tests/test-all.sh +++ b/tests/test-all.sh @@ -25,7 +25,12 @@ if [ "$totalFailed" -ne "0" ]; then fi cd ../pytest -./simpletest.sh 2>&1 | tee pytest-out.txt + +if [ "$1" == "cron" ]; then + ./fulltest.sh 2>&1 | tee pytest-out.txt +else + ./smoketest.sh 2>&1 | tee pytest-out.txt +fi totalPySuccess=`grep 'successfully executed' pytest-out.txt | wc -l` if [ "$totalPySuccess" -gt "0" ]; then From 2777a2f0c931e1d92bf636591b364fe05a6af878 Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Mon, 27 Apr 2020 15:23:15 +0800 Subject: [PATCH 05/15] fix python source code for pep8 standard. 
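
The reformatting below applies the standard PEP 8 layout to the test
cases: 4-space indentation, two blank lines around class and top-level
definitions, and spaces after commas and around operators in the
'%'-style format expressions. As an illustrative sketch only (the
variable names are taken from the import_merge cases below), a fragment
written as

    startTime = 1520000010000
    sqlcmd = ['import into tb1 values']
    for rid in range(1,39):
      sqlcmd.append('(%ld, %d)' %(startTime+rid, rid))

is rewritten as

    startTime = 1520000010000
    sqlcmd = ['import into tb1 values']
    for rid in range(1, 39):
        sqlcmd.append('(%ld, %d)' % (startTime + rid, rid))

without changing what the statements do.
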
--- tests/pytest/import_merge/importBlock1H.py | 100 +++++++------ tests/pytest/import_merge/importBlock1HO.py | 106 ++++++------- tests/pytest/import_merge/importBlock1HPO.py | 110 +++++++------- tests/pytest/import_merge/importBlock1S.py | 106 ++++++------- tests/pytest/import_merge/importBlock1Sub.py | 106 ++++++------- tests/pytest/import_merge/importBlock1T.py | 100 +++++++------ tests/pytest/import_merge/importBlock1TO.py | 106 ++++++------- tests/pytest/import_merge/importBlock1TPO.py | 110 +++++++------- tests/pytest/import_merge/importBlock2H.py | 100 +++++++------ tests/pytest/import_merge/importBlock2HO.py | 106 ++++++------- tests/pytest/import_merge/importBlock2HPO.py | 110 +++++++------- tests/pytest/import_merge/importBlock2S.py | 106 ++++++------- tests/pytest/import_merge/importBlock2Sub.py | 106 ++++++------- tests/pytest/import_merge/importBlock2T.py | 100 +++++++------ tests/pytest/import_merge/importBlock2TO.py | 106 ++++++------- tests/pytest/import_merge/importBlock2TPO.py | 110 +++++++------- .../pytest/import_merge/importBlockbetween.py | 112 +++++++------- tests/pytest/import_merge/importCacheFileH.py | 126 ++++++++-------- .../pytest/import_merge/importCacheFileHO.py | 132 ++++++++-------- .../pytest/import_merge/importCacheFileHPO.py | 138 ++++++++--------- tests/pytest/import_merge/importCacheFileS.py | 132 ++++++++-------- .../pytest/import_merge/importCacheFileSub.py | 132 ++++++++-------- tests/pytest/import_merge/importCacheFileT.py | 126 ++++++++-------- .../pytest/import_merge/importCacheFileTO.py | 132 ++++++++-------- .../pytest/import_merge/importCacheFileTPO.py | 138 ++++++++--------- tests/pytest/import_merge/importDataH2.py | 141 +++++++++--------- tests/pytest/import_merge/importDataHO.py | 120 ++++++++------- tests/pytest/import_merge/importDataHO2.py | 120 ++++++++------- tests/pytest/import_merge/importDataHPO.py | 124 +++++++-------- tests/pytest/import_merge/importDataLastH.py | 112 +++++++------- tests/pytest/import_merge/importDataLastHO.py | 118 ++++++++------- .../pytest/import_merge/importDataLastHPO.py | 122 +++++++-------- tests/pytest/import_merge/importDataLastS.py | 118 ++++++++------- .../pytest/import_merge/importDataLastSub.py | 118 ++++++++------- tests/pytest/import_merge/importDataLastT.py | 106 ++++++------- tests/pytest/import_merge/importDataLastTO.py | 112 +++++++------- .../pytest/import_merge/importDataLastTPO.py | 116 +++++++------- tests/pytest/import_merge/importDataS.py | 112 +++++++------- tests/pytest/import_merge/importDataSub.py | 120 ++++++++------- tests/pytest/import_merge/importDataT.py | 106 ++++++------- tests/pytest/import_merge/importDataTO.py | 112 +++++++------- tests/pytest/import_merge/importDataTPO.py | 118 ++++++++------- tests/pytest/import_merge/importHORestart.py | 96 ++++++------ tests/pytest/import_merge/importHPORestart.py | 104 +++++++------ tests/pytest/import_merge/importHRestart.py | 92 ++++++------ tests/pytest/import_merge/importHead.py | 96 ++++++------ .../pytest/import_merge/importHeadOverlap.py | 88 +++++------ .../import_merge/importHeadPartOverlap.py | 96 ++++++------ tests/pytest/import_merge/importLastH.py | 104 ++++++------- tests/pytest/import_merge/importLastHO.py | 110 +++++++------- tests/pytest/import_merge/importLastHPO.py | 114 +++++++------- tests/pytest/import_merge/importLastS.py | 108 +++++++------- tests/pytest/import_merge/importLastSub.py | 110 +++++++------- tests/pytest/import_merge/importLastT.py | 110 +++++++------- tests/pytest/import_merge/importLastTO.py | 
116 +++++++------- tests/pytest/import_merge/importLastTPO.py | 118 ++++++++------- tests/pytest/import_merge/importSRestart.py | 108 +++++++------- tests/pytest/import_merge/importSpan.py | 100 +++++++------ tests/pytest/import_merge/importSubRestart.py | 108 +++++++------- tests/pytest/import_merge/importTORestart.py | 108 +++++++------- tests/pytest/import_merge/importTPORestart.py | 112 +++++++------- tests/pytest/import_merge/importTRestart.py | 104 ++++++------- tests/pytest/import_merge/importTail.py | 96 ++++++------ .../pytest/import_merge/importTailOverlap.py | 100 +++++++------ .../import_merge/importTailPartOverlap.py | 104 +++++++------ tests/pytest/import_merge/importToCommit.py | 126 ++++++++-------- tests/pytest/query/tbname.py | 15 +- tests/pytest/util/sql.py | 4 +- 68 files changed, 3808 insertions(+), 3590 deletions(-) diff --git a/tests/pytest/import_merge/importBlock1H.py b/tests/pytest/import_merge/importBlock1H.py index 8ef3735826..a1ba905b17 100644 --- a/tests/pytest/import_merge/importBlock1H.py +++ b/tests/pytest/import_merge/importBlock1H.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,53 +18,55 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 38 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,39): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(38) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed 
int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 1 data before") - startTime = self.startTime - 1 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(39) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(39) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1HO.py b/tests/pytest/import_merge/importBlock1HO.py index e88c13970e..73aec07a90 100644 --- a/tests/pytest/import_merge/importBlock1HO.py +++ b/tests/pytest/import_merge/importBlock1HO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,58 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 38 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,39): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(38) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 10 data before with overlap") - startTime = self.startTime - 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(43) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(43) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1HPO.py b/tests/pytest/import_merge/importBlock1HPO.py index c0cbad5462..ad224e5c65 100644 --- a/tests/pytest/import_merge/importBlock1HPO.py +++ 
b/tests/pytest/import_merge/importBlock1HPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,60 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 38 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(15,43): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(38) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 20 data before with partly overlap") - startTime = self.startTime - 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(15, 43): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in 
range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(47) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(47) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1S.py b/tests/pytest/import_merge/importBlock1S.py index 849c1ece8e..37c2ad6631 100644 --- a/tests/pytest/import_merge/importBlock1S.py +++ b/tests/pytest/import_merge/importBlock1S.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,58 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 38 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,39): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(38) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 50 data covering existing data") - startTime = self.startTime - 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,51): - sqlcmd.append('(%ld, %d)' %(startTime+rid, 
rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 50 data covering existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 51): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(50) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(50) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1Sub.py b/tests/pytest/import_merge/importBlock1Sub.py index ef75c49d09..5228563651 100644 --- a/tests/pytest/import_merge/importBlock1Sub.py +++ b/tests/pytest/import_merge/importBlock1Sub.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,58 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 38 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,39): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(38) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 10 data totally repetitive") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(38) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1T.py b/tests/pytest/import_merge/importBlock1T.py index ac1fb5a8d9..75f41b98cf 100644 --- a/tests/pytest/import_merge/importBlock1T.py +++ b/tests/pytest/import_merge/importBlock1T.py @@ 
-1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,53 +18,55 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 38 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,39): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(38) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 1 data after") - startTime = self.startTime + 38 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+1, rid)) + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime = self.startTime + 38 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(39) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(39) - def stop(self): - 
tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1TO.py b/tests/pytest/import_merge/importBlock1TO.py index 52580de81c..b43428da27 100644 --- a/tests/pytest/import_merge/importBlock1TO.py +++ b/tests/pytest/import_merge/importBlock1TO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,58 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 38 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,39): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(38) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 10 data later with overlap") - startTime = self.startTime + 30 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + 
tdLog.info("================= step4") + tdLog.info("import 10 data later with overlap") + startTime = self.startTime + 30 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(40) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(40) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1TPO.py b/tests/pytest/import_merge/importBlock1TPO.py index f1b6186212..913ca1cc02 100644 --- a/tests/pytest/import_merge/importBlock1TPO.py +++ b/tests/pytest/import_merge/importBlock1TPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,60 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 38 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,31): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(35,43): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(38) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block 
can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 30 data later with partly overlap") - startTime = self.startTime + 25 - sqlcmd = ['import into tb1 values'] - for rid in range(1,31): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(35, 43): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 30 data later with partly overlap") + startTime = self.startTime + 25 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(55) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(55) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2H.py b/tests/pytest/import_merge/importBlock2H.py index 24a2f1fd08..bacd88cbe7 100644 --- a/tests/pytest/import_merge/importBlock2H.py +++ b/tests/pytest/import_merge/importBlock2H.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,53 +18,55 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 76 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,77): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(76) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 1 data before") - startTime = self.startTime - 1 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(77) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(77) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2HO.py b/tests/pytest/import_merge/importBlock2HO.py index 37ab498ae8..01c0f622b6 100644 --- a/tests/pytest/import_merge/importBlock2HO.py +++ b/tests/pytest/import_merge/importBlock2HO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. 
- # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,58 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 76 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,77): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(76) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 10 data before with overlap") - startTime = self.startTime - 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(81) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - 
tdSql.checkRows(81) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2HPO.py b/tests/pytest/import_merge/importBlock2HPO.py index 6243d2483f..ee8d580dfe 100644 --- a/tests/pytest/import_merge/importBlock2HPO.py +++ b/tests/pytest/import_merge/importBlock2HPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,60 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 76 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(15,81): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(76) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 20 data before with partly overlap") - startTime = self.startTime - 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for 
rid in range(15, 81): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(85) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(85) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2S.py b/tests/pytest/import_merge/importBlock2S.py index 4aa1dfd9dd..d85074bfeb 100644 --- a/tests/pytest/import_merge/importBlock2S.py +++ b/tests/pytest/import_merge/importBlock2S.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,58 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 76 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,77): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(76) + 
tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 90 data covering existing data") - startTime = self.startTime - 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,91): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 90 data covering existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 91): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(90) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(90) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2Sub.py b/tests/pytest/import_merge/importBlock2Sub.py index fe95b2d34d..deb1dc8337 100644 --- a/tests/pytest/import_merge/importBlock2Sub.py +++ b/tests/pytest/import_merge/importBlock2Sub.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,58 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 76 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,77): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(76) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 10 data totally repetitive") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(76) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2T.py b/tests/pytest/import_merge/importBlock2T.py index 4d5a4c942d..ded698d28c 100644 --- a/tests/pytest/import_merge/importBlock2T.py +++ 
b/tests/pytest/import_merge/importBlock2T.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,53 +18,55 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 76 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,77): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(76) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 1 data after") - startTime = self.startTime + 76 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+1, rid)) + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime = self.startTime + 76 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(77) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from 
tb1') - tdSql.checkRows(77) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2TO.py b/tests/pytest/import_merge/importBlock2TO.py index 1bfea0a393..ffc88c2c99 100644 --- a/tests/pytest/import_merge/importBlock2TO.py +++ b/tests/pytest/import_merge/importBlock2TO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,58 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 76 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,77): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(76) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 10 data later with overlap") - startTime = self.startTime + 70 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + 
tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 10 data later with overlap") + startTime = self.startTime + 70 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(80) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(80) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2TPO.py b/tests/pytest/import_merge/importBlock2TPO.py index b97b4b90b0..8b6c70c32b 100644 --- a/tests/pytest/import_merge/importBlock2TPO.py +++ b/tests/pytest/import_merge/importBlock2TPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,60 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 76 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,61): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(65,81): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(76) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create 
table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import 30 data later with partly overlap") - startTime = self.startTime + 55 - sqlcmd = ['import into tb1 values'] - for rid in range(1,31): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 61): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(65, 81): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 30 data later with partly overlap") + startTime = self.startTime + 55 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(85) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(85) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlockbetween.py b/tests/pytest/import_merge/importBlockbetween.py index 0dd9ed2bc2..c3482b3776 100644 --- a/tests/pytest/import_merge/importBlockbetween.py +++ b/tests/pytest/import_merge/importBlockbetween.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,59 +18,61 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("one block can import 38 records") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 76 sequential data with gap between 2 blocks") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,39): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - for rid in range(40,78): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(76) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") - tdLog.info("================= step4") - tdLog.info("import data into the gap between 2 blocks") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(39,40): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step2") + tdLog.info("import 76 sequential data with gap between 2 blocks") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + for rid in range(40, 78): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import data into the gap between 2 blocks") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(39, 40): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(77) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(77) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, 
TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileH.py b/tests/pytest/import_merge/importCacheFileH.py index 66155d7b5f..cd2b3a73f1 100644 --- a/tests/pytest/import_merge/importCacheFileH.py +++ b/tests/pytest/import_merge/importCacheFileH.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,69 +18,71 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 10 data again") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= 
step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) - tdLog.info("================= step7") - tdLog.info("import 1 data before") - startTime = self.startTime - 1 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(21) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step8") - tdSql.query('select * from tb1') - tdSql.checkRows(21) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileHO.py b/tests/pytest/import_merge/importCacheFileHO.py index e53722f853..2e65c337b7 100644 --- a/tests/pytest/import_merge/importCacheFileHO.py +++ b/tests/pytest/import_merge/importCacheFileHO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,72 +18,74 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 10 data again") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) - tdLog.info("================= step7") - tdLog.info("import 10 data before with overlap") - startTime = self.startTime - 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in 
range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(25) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step8") - tdSql.query('select * from tb1') - tdSql.checkRows(25) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileHPO.py b/tests/pytest/import_merge/importCacheFileHPO.py index 5e869534ba..f01ebd0d60 100644 --- a/tests/pytest/import_merge/importCacheFileHPO.py +++ b/tests/pytest/import_merge/importCacheFileHPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,74 +18,78 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 9 sequential data with gap") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - if (rid == 5): continue - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(9) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 9 sequential data with gap") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + if (rid == 5): + 
continue + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 9 data again with gap") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - if (rid == 5): continue - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(9) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(18) + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) - tdLog.info("================= step7") - tdLog.info("import 20 data before with partly overlap") - startTime = self.startTime - 3 - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step5") + tdLog.info("import 9 data again with gap") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + if (rid == 5): + continue + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(18) + + tdLog.info("================= step7") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 3 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(23) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step8") - tdSql.query('select * from tb1') - tdSql.checkRows(23) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileS.py b/tests/pytest/import_merge/importCacheFileS.py index 151ce8d9ab..0bb9107562 100644 --- a/tests/pytest/import_merge/importCacheFileS.py +++ b/tests/pytest/import_merge/importCacheFileS.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,72 +18,74 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 10 data again") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) - tdLog.info("================= step7") - tdLog.info("import 30 data covering existing data") - startTime = self.startTime - 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,31): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 30 data covering existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid 
in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(30) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step8") - tdSql.query('select * from tb1') - tdSql.checkRows(30) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileSub.py b/tests/pytest/import_merge/importCacheFileSub.py index 6237caeec4..cd5d250968 100644 --- a/tests/pytest/import_merge/importCacheFileSub.py +++ b/tests/pytest/import_merge/importCacheFileSub.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,72 +18,74 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + 
tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 10 data again") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) - tdLog.info("================= step7") - tdLog.info("import 10 data totally repetitive") - startTime = self.startTime + 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step8") - tdSql.query('select * from tb1') - tdSql.checkRows(20) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileT.py b/tests/pytest/import_merge/importCacheFileT.py index 70d6fa09ff..be79e26bc7 100644 --- a/tests/pytest/import_merge/importCacheFileT.py +++ b/tests/pytest/import_merge/importCacheFileT.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,69 +18,71 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 10 data again") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) - tdLog.info("================= step7") - tdLog.info("import 1 data later") - startTime = self.startTime + 20 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+1, rid)) + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 1 data later") + startTime = self.startTime + 20 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, rid)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + 
tdSql.checkRows(21) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step8") - tdSql.query('select * from tb1') - tdSql.checkRows(21) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileTO.py b/tests/pytest/import_merge/importCacheFileTO.py index 16ec29bb02..dd17de3adf 100644 --- a/tests/pytest/import_merge/importCacheFileTO.py +++ b/tests/pytest/import_merge/importCacheFileTO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,72 +18,74 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 10 data again") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for 
rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) - tdLog.info("================= step7") - tdLog.info("import 10 data later with overlap") - startTime = self.startTime + 15 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 10 data later with overlap") + startTime = self.startTime + 15 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(25) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step8") - tdSql.query('select * from tb1') - tdSql.checkRows(25) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileTPO.py b/tests/pytest/import_merge/importCacheFileTPO.py index 8b61e1ca72..948b99ed21 100644 --- a/tests/pytest/import_merge/importCacheFileTPO.py +++ b/tests/pytest/import_merge/importCacheFileTPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,74 +18,78 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 9 sequential data with gap") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - if (rid == 7): continue - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(9) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 9 sequential data with gap") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + if (rid == 7): + continue + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 9 data again with gap") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - if (rid == 7): continue - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(9) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(18) + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) - tdLog.info("================= step7") - tdLog.info("import 20 data later with partly overlap") - startTime = self.startTime + 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step5") + tdLog.info("import 9 data again with gap") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + if (rid == 7): + continue + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(18) + + tdLog.info("================= 
step7") + tdLog.info("import 20 data later with partly overlap") + startTime = self.startTime + 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(25) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step8") - tdSql.query('select * from tb1') - tdSql.checkRows(25) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataH2.py b/tests/pytest/import_merge/importDataH2.py index 0df7d67d88..d49abff374 100644 --- a/tests/pytest/import_merge/importDataH2.py +++ b/tests/pytest/import_merge/importDataH2.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,74 +18,77 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import %d sequential data" %(self.rows/2)) - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,self.rows/2+1): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(self.rows/2) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + 
tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % (self.rows / 2)) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, self.rows / 2 + 1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 1 data before") - startTime = self.startTime - 1 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(self.rows / 2) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step7") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rows / 2 + 1) + + tdLog.info("================= step8") + tdLog.info("import 10 data in batch before") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime - rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step9") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rows / 2 + 11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step7") - tdSql.execute('reset query cache') - tdSql.query('select * from tb1 order by ts desc') - tdSql.checkRows(self.rows/2 + 1) - - tdLog.info("================= step8") - tdLog.info("import 10 data in batch before") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime - rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step9") - tdSql.execute('reset query cache') - tdSql.query('select * from tb1 order by ts desc') - tdSql.checkRows(self.rows/2 + 11) - - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataHO.py b/tests/pytest/import_merge/importDataHO.py index 84dcb82a57..0483e6844c 100644 --- a/tests/pytest/import_merge/importDataHO.py +++ b/tests/pytest/import_merge/importDataHO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,63 +18,67 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 - self.rowsPerTable = 20 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + self.rowsPerTable = 20 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import %d sequential data" %self.rowsPerTable) - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,self.rowsPerTable+1): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select count(*) from tb1') - tdSql.checkData(0, 0, self.rowsPerTable) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % self.rowsPerTable) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, self.rowsPerTable + 1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 6 data before with overlap") - startTime = self.startTime - 3 - sqlcmd = ['import into tb1 values'] - for rid in range(1,7): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, self.rowsPerTable) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 6 data before with overlap") + startTime = self.startTime - 3 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 7): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rowsPerTable + 3) + + def stop(self): + tdSql.close() + tdLog.success("%s 
successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1 order by ts desc') - tdSql.checkRows(self.rowsPerTable + 3) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataHO2.py b/tests/pytest/import_merge/importDataHO2.py index 8077849cc1..ab7044d2a7 100644 --- a/tests/pytest/import_merge/importDataHO2.py +++ b/tests/pytest/import_merge/importDataHO2.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,63 +18,67 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 - self.rowsPerTable = 100 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + self.rowsPerTable = 100 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import %d sequential data" %self.rowsPerTable) - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,self.rowsPerTable+1): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select count(*) from tb1') - tdSql.checkData(0, 0, self.rowsPerTable) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % self.rowsPerTable) + 
startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, self.rowsPerTable + 1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 6 data before with overlap") - startTime = self.startTime - 3 - sqlcmd = ['import into tb1 values'] - for rid in range(6, 0, -1): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, self.rowsPerTable) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 6 data before with overlap") + startTime = self.startTime - 3 + sqlcmd = ['import into tb1 values'] + for rid in range(6, 0, -1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rowsPerTable + 3) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1 order by ts desc') - tdSql.checkRows(self.rowsPerTable+3) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataHPO.py b/tests/pytest/import_merge/importDataHPO.py index a73ae9c103..f165bd7b5a 100644 --- a/tests/pytest/import_merge/importDataHPO.py +++ b/tests/pytest/import_merge/importDataHPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,65 +18,69 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 - self.rowsPerTable = 20 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + self.rowsPerTable = 20 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import %d sequential data" %self.rowsPerTable) - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,10): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(14,self.rowsPerTable+5): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select count(*) from tb1') - tdSql.checkData(0, 0, self.rowsPerTable) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % self.rowsPerTable) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 10): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(14, self.rowsPerTable + 5): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 20 data before with partly overlap") - startTime = self.startTime - 4 - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, self.rowsPerTable) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 4 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + 
tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(self.rowsPerTable + 8) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(self.rowsPerTable+8) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastH.py b/tests/pytest/import_merge/importDataLastH.py index c867703e13..319fd40677 100644 --- a/tests/pytest/import_merge/importDataLastH.py +++ b/tests/pytest/import_merge/importDataLastH.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,59 +18,63 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 205 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,206): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(205) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + 
tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 1 data before") - startTime = self.startTime - 1 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(206) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(206) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastHO.py b/tests/pytest/import_merge/importDataLastHO.py index 68e858f781..5a71c5db65 100644 --- a/tests/pytest/import_merge/importDataLastHO.py +++ b/tests/pytest/import_merge/importDataLastHO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,62 +18,66 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 205 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,206): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(205) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 5 data before with overlap") - startTime = self.startTime - 2 - sqlcmd = ['import into tb1 values'] - for rid in range(1,6): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 5 data before with overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(207) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(207) - def stop(self): - 
tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastHPO.py b/tests/pytest/import_merge/importDataLastHPO.py index 29b71a7a8b..f2c95cbd4d 100644 --- a/tests/pytest/import_merge/importDataLastHPO.py +++ b/tests/pytest/import_merge/importDataLastHPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,64 +18,68 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 205 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(14,209): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(205) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(14, 209): + 
sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 20 data before with partly overlap") - startTime = self.startTime - 2 - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(210) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(210) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastS.py b/tests/pytest/import_merge/importDataLastS.py index cb61f5d088..929e02dd1e 100644 --- a/tests/pytest/import_merge/importDataLastS.py +++ b/tests/pytest/import_merge/importDataLastS.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,62 +18,66 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 205 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,206): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(205) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 250 data covering the existing data") - startTime = self.startTime - 15 - sqlcmd = ['import into tb1 values'] - for rid in range(1,251): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 250 data covering the existing data") + startTime = self.startTime - 15 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 251): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(250) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(250) - 
def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastSub.py b/tests/pytest/import_merge/importDataLastSub.py index 2b30c7c7c8..158fa0fb3c 100644 --- a/tests/pytest/import_merge/importDataLastSub.py +++ b/tests/pytest/import_merge/importDataLastSub.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,62 +18,66 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 205 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,206): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(205) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 
10 data totally repetitive") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(205) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastT.py b/tests/pytest/import_merge/importDataLastT.py index fecf64e495..9bc90a8275 100644 --- a/tests/pytest/import_merge/importDataLastT.py +++ b/tests/pytest/import_merge/importDataLastT.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,55 +18,59 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 205 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,206): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(205) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 1 data later") - startTime = self.startTime + 205 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data later") + startTime = self.startTime + 205 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(206) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(206) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastTO.py b/tests/pytest/import_merge/importDataLastTO.py index 700c2b8f04..0c93ac430b 100644 --- 
a/tests/pytest/import_merge/importDataLastTO.py +++ b/tests/pytest/import_merge/importDataLastTO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,62 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 205 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,206): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(205) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 5 data later with overlap") - startTime = self.startTime + 203 - sqlcmd = ['import into tb1 values'] - for rid in range(1,6): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= 
step5") + tdLog.info("import 5 data later with overlap") + startTime = self.startTime + 203 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(208) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(208) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastTPO.py b/tests/pytest/import_merge/importDataLastTPO.py index 2ae43378e8..188e93e0db 100644 --- a/tests/pytest/import_merge/importDataLastTPO.py +++ b/tests/pytest/import_merge/importDataLastTPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,60 +18,64 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than %d rows less than %d rows will go to data and last file" %(self.rows, 10+self.rows)) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 205 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,196): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(200,210): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(205) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than 
%d rows will go to data and last file" % + (self.rows, 10 + self.rows)) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 196): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(200, 210): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 20 data later with partly overlap") - startTime = self.startTime + 192 - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data later with partly overlap") + startTime = self.startTime + 192 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(212) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(212) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataS.py b/tests/pytest/import_merge/importDataS.py index 0a1849f5f7..65d4087c3d 100644 --- a/tests/pytest/import_merge/importDataS.py +++ b/tests/pytest/import_merge/importDataS.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,62 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 20 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 30 data covering the existing data") - startTime = self.startTime - 5 - sqlcmd = ['import into tb1 values'] - for rid in range(1,31): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 30 data covering the existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(30) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(30) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git 
a/tests/pytest/import_merge/importDataSub.py b/tests/pytest/import_merge/importDataSub.py index cd7b37b921..4bf85f2bdd 100644 --- a/tests/pytest/import_merge/importDataSub.py +++ b/tests/pytest/import_merge/importDataSub.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,63 +18,67 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import %d sequential data" %(self.rows/2)) - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,self.rows/2+1): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(self.rows/2) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % (self.rows / 2)) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, self.rows / 2 + 1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 10 data totally repetitive") - startTime = self.startTime + 10 - sqlcmd = ['import into tb1 values'] - for rid in range(1,11): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - 
tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(self.rows / 2) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step9") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rows / 2) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step9") - tdSql.execute('reset query cache') - tdSql.query('select * from tb1 order by ts desc') - tdSql.checkRows(self.rows/2) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataT.py b/tests/pytest/import_merge/importDataT.py index 8054d0a13c..66016c5555 100644 --- a/tests/pytest/import_merge/importDataT.py +++ b/tests/pytest/import_merge/importDataT.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,55 +18,59 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 20 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 1 data later") - startTime = self.startTime + 20 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data later") + startTime = self.startTime + 20 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(21) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(21) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataTO.py b/tests/pytest/import_merge/importDataTO.py index e808c7382d..a3c17b2846 100644 --- a/tests/pytest/import_merge/importDataTO.py +++ 
b/tests/pytest/import_merge/importDataTO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,62 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 20 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 6 data later with overlap") - startTime = self.startTime + 18 - sqlcmd = ['import into tb1 values'] - for rid in range(1,7): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 6 data later with overlap") + startTime = self.startTime + 18 + sqlcmd = ['import into 
tb1 values'] + for rid in range(1, 7): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(24) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(24) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataTPO.py b/tests/pytest/import_merge/importDataTPO.py index c9d454103d..20eb41cc08 100644 --- a/tests/pytest/import_merge/importDataTPO.py +++ b/tests/pytest/import_merge/importDataTPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,61 +18,65 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("More than 10 rows less than %d rows will go to data file" %self.rows) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 20 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,18): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(22,25): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) - tdLog.sleep(5) + 
tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 18): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(22, 25): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 20 data later with partly overlap") - startTime = self.startTime + 15 - sqlcmd = ['import into tb1 values'] - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + tdLog.sleep(5) + + tdLog.info("================= step5") + tdLog.info("import 20 data later with partly overlap") + startTime = self.startTime + 15 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(35) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(35) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHORestart.py b/tests/pytest/import_merge/importHORestart.py index 0d7bfab30a..cfbfa61c90 100644 --- a/tests/pytest/import_merge/importHORestart.py +++ b/tests/pytest/import_merge/importHORestart.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,49 +18,55 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.prepare() + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') + tdSql.prepare() - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("================= step4") - tdLog.info("import 5 data before with overlap") - startTime = self.startTime - 2 - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) - tdLog.info("================= step5") - tdDnodes.forcestop(1) - tdDnodes.start(1) - tdLog.sleep(10) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 5 data before with overlap") + startTime = self.startTime - 2 + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(12) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHPORestart.py b/tests/pytest/import_merge/importHPORestart.py index 3be6e74a14..7e96d44a1a 100644 --- a/tests/pytest/import_merge/importHPORestart.py +++ b/tests/pytest/import_merge/importHPORestart.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. 
- # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,52 +18,60 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.prepare() + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') + tdSql.prepare() - tdLog.info("================= step2") - tdLog.info("import 8 sequential data with gap") - startTime = self.startTime - for rid in range(1,4): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - for rid in range(6,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - startTime += 1 - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(8) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("================= step4") - tdLog.info("import 8 data before with partly overlap") - startTime = self.startTime - 2 - for rid in range(1, 9): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1, 4): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + for rid in range(6, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + startTime += 1 - tdLog.info("================= step5") - tdDnodes.forcestop(1) - tdDnodes.start(1) - tdLog.sleep(10) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data before with partly overlap") + startTime = self.startTime - 2 + for rid in range(1, 9): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(12) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHRestart.py b/tests/pytest/import_merge/importHRestart.py index 
3b885f7d45..aa1783977e 100644 --- a/tests/pytest/import_merge/importHRestart.py +++ b/tests/pytest/import_merge/importHRestart.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,48 +18,52 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.prepare() + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') + tdSql.prepare() - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("================= step4") - tdLog.info("import 1 data before") - startTime = self.startTime - 1 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) - tdLog.info("================= step5") - tdDnodes.forcestop(1) - tdDnodes.start(1) - tdLog.sleep(10) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(11) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHead.py b/tests/pytest/import_merge/importHead.py index 
6b34e2247f..6971986ebc 100644 --- a/tests/pytest/import_merge/importHead.py +++ b/tests/pytest/import_merge/importHead.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,51 +18,53 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) - startTime += 1 - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdLog.info("import 1 data before") - startTime = self.startTime - 1 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(11) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - 
tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHeadOverlap.py b/tests/pytest/import_merge/importHeadOverlap.py index ed17615957..df5f07b5a2 100644 --- a/tests/pytest/import_merge/importHeadOverlap.py +++ b/tests/pytest/import_merge/importHeadOverlap.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,46 +18,48 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.prepare() + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.prepare() - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) - startTime += 1 - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdLog.info("import 5 data before with overlap") - startTime = self.startTime - 2 - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) - startTime += 1 + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 5 data before with overlap") + startTime = self.startTime - 2 + for rid in range(1, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(12) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) 
tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHeadPartOverlap.py b/tests/pytest/import_merge/importHeadPartOverlap.py index a002edcd5a..8c9885e22f 100644 --- a/tests/pytest/import_merge/importHeadPartOverlap.py +++ b/tests/pytest/import_merge/importHeadPartOverlap.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,50 +18,52 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.prepare() + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') + tdSql.prepare() - tdLog.info("================= step2") - tdLog.info("import 8 sequential data with gap") - startTime = self.startTime - for rid in range(1,4): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) - startTime += 1 - startTime += 2 - for rid in range(6,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) - startTime += 1 - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(8) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("================= step4") - tdLog.info("import 8 data before with partly overlap") - startTime = self.startTime - 2 - for rid in range(1, 9): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) - startTime += 1 + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1, 4): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + startTime += 2 + for rid in range(6, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data before with partly overlap") + startTime = self.startTime - 2 + for rid in range(1, 9): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % 
__file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(12) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastH.py b/tests/pytest/import_merge/importLastH.py index d5096845c7..c69f453971 100644 --- a/tests/pytest/import_merge/importLastH.py +++ b/tests/pytest/import_merge/importLastH.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,55 +18,57 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("less than 10 rows will go to last file") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 5 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,6): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(5) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 1 data before") - startTime = self.startTime - 1 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + tdLog.info("================= step3") 
+ tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(6) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastHO.py b/tests/pytest/import_merge/importLastHO.py index 13bfcaea70..ec930d1807 100644 --- a/tests/pytest/import_merge/importLastHO.py +++ b/tests/pytest/import_merge/importLastHO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,60 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("less than 10 rows will go to last file") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 5 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,6): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(5) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") - tdLog.info("================= step4") - tdDnodes.stop(1) 
- tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 4 data before with overlap") - startTime = self.startTime - 2 - sqlcmd = ['import into tb1 values'] - for rid in range(1,5): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 4 data before with overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 5): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(7) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(7) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastHPO.py b/tests/pytest/import_merge/importLastHPO.py index 73a0a1d935..9603a7b852 100644 --- a/tests/pytest/import_merge/importLastHPO.py +++ b/tests/pytest/import_merge/importLastHPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,60 +18,62 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("less than 10 rows will go to last file") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 6 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,4): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(6,9): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(6) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 6 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 4): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(6, 9): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 8 data before with partly overlap") - startTime = self.startTime - 2 - sqlcmd = ['import into tb1 values'] - for rid in range(1,9): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 8 data before with partly overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 9): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(10) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - 
tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastS.py b/tests/pytest/import_merge/importLastS.py index 16ce3b464c..7dbe74e2ca 100644 --- a/tests/pytest/import_merge/importLastS.py +++ b/tests/pytest/import_merge/importLastS.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,57 +18,59 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("less than 10 rows will go to last file") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 5 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,6): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(5) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 20 data covering existing data") - startTime = self.startTime - 10 - for rid in range(1,21): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + 
tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data covering existing data") + startTime = self.startTime - 10 + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(20) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastSub.py b/tests/pytest/import_merge/importLastSub.py index 68d1a57f97..f028ba5fd7 100644 --- a/tests/pytest/import_merge/importLastSub.py +++ b/tests/pytest/import_merge/importLastSub.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,60 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("less than 10 rows will go to last file") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 5 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,6): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(5) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= 
step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 3 data totally repetitive") - startTime = self.startTime + 1 - sqlcmd = ['import into tb1 values'] - for rid in range(1,4): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 3 data totally repetitive") + startTime = self.startTime + 1 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 4): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(5) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastT.py b/tests/pytest/import_merge/importLastT.py index 4c33b4dd01..3fe4e0006c 100644 --- a/tests/pytest/import_merge/importLastT.py +++ b/tests/pytest/import_merge/importLastT.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,59 +18,61 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("less than 10 rows will go to last file") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 5 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,6): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(5) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 1 data later") - startTime = self.startTime + 5 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime + 1, 1)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data later") + startTime = self.startTime + 5 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(6) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastTO.py b/tests/pytest/import_merge/importLastTO.py index e89fa342ae..76e5016bdb 100644 --- 
a/tests/pytest/import_merge/importLastTO.py +++ b/tests/pytest/import_merge/importLastTO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,62 +18,64 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("less than 10 rows will go to last file") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 5 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,6): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(5) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 4 data later with overlap") - startTime = self.startTime + 3 - sqlcmd = ['import into tb1 values'] - for rid in range(1,5): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + 
tdLog.info("import 4 data later with overlap") + startTime = self.startTime + 3 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 5): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(7) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(7) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastTPO.py b/tests/pytest/import_merge/importLastTPO.py index 2b673bf152..08f4168063 100644 --- a/tests/pytest/import_merge/importLastTPO.py +++ b/tests/pytest/import_merge/importLastTPO.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,63 +18,65 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - self.rows = 200 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db rows %d' %self.rows) - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("less than 10 rows will go to last file") + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 6 sequential data") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(1,4): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - for rid in range(6,9): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(6) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less 
than 10 rows will go to last file") - tdLog.info("================= step4") - tdDnodes.stop(1) - tdLog.sleep(5) - tdDnodes.start(1) + tdLog.info("================= step2") + tdLog.info("import 6 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 4): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(6, 9): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) - tdLog.info("================= step5") - tdLog.info("import 8 data later with partly overlap") - startTime = self.startTime + 2 - for rid in range(1,9): - sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 8 data later with partly overlap") + startTime = self.startTime + 2 + for rid in range(1, 9): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(10) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importSRestart.py b/tests/pytest/import_merge/importSRestart.py index 5636ab5015..0771b8bf9c 100644 --- a/tests/pytest/import_merge/importSRestart.py +++ b/tests/pytest/import_merge/importSRestart.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,62 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("================= step4") - tdLog.info("import 20 data cover existing data") - startTime = self.startTime - 5 - for rid in range(1,21): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) - tdLog.info("================= step5") - tdDnodes.forcestop(1) - tdDnodes.start(1) - tdLog.sleep(10) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 20 data cover existing data") + startTime = self.startTime - 5 + for rid in range(1, 21): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(20) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importSpan.py b/tests/pytest/import_merge/importSpan.py index e57062a7b5..736c4bad64 100644 --- a/tests/pytest/import_merge/importSpan.py +++ b/tests/pytest/import_merge/importSpan.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS 
Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,51 +18,57 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdLog.info("import 20 data cover existing data") - startTime = self.startTime - 5 - for rid in range(1,21): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 20 data cover existing data") + startTime = self.startTime - 5 + for rid in range(1, 21): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(20) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importSubRestart.py 
b/tests/pytest/import_merge/importSubRestart.py index aae79d787b..f7f33d32c1 100644 --- a/tests/pytest/import_merge/importSubRestart.py +++ b/tests/pytest/import_merge/importSubRestart.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,62 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 20 sequential data") - startTime = self.startTime - for rid in range(1,21): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(20) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("================= step4") - tdLog.info("import 10 data totally repetitive") - startTime = self.startTime + 5 - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + for rid in range(1, 21): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) - tdLog.info("================= step5") - tdDnodes.forcestop(1) - tdDnodes.start(1) - tdLog.sleep(10) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 5 + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + 
tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(20) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTORestart.py b/tests/pytest/import_merge/importTORestart.py index 6a0621568d..194756cd12 100644 --- a/tests/pytest/import_merge/importTORestart.py +++ b/tests/pytest/import_merge/importTORestart.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,56 +18,62 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("================= step4") - tdLog.info("import 6 data after with overlap") - startTime = self.startTime + 8 - for rid in range(1,7): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) - tdLog.info("================= step5") - tdDnodes.forcestop(1) - tdDnodes.start(1) - tdLog.sleep(10) + 
tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 6 data after with overlap") + startTime = self.startTime + 8 + for rid in range(1, 7): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(14) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(14) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTPORestart.py b/tests/pytest/import_merge/importTPORestart.py index 0ccb481890..36d4b64390 100644 --- a/tests/pytest/import_merge/importTPORestart.py +++ b/tests/pytest/import_merge/importTPORestart.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,58 +18,66 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 8 sequential data with gap") - startTime = self.startTime - for rid in range(1,6): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - for rid in range(8,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("================= step3") - tdSql.query('select 
* from tb1') - tdSql.checkRows(8) + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1, 6): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + for rid in range(8, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) - tdLog.info("================= step4") - tdLog.info("import 8 data after with partly overlap") - startTime = self.startTime + 3 - for rid in range(1,9): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) - tdLog.info("================= step5") - tdDnodes.forcestop(1) - tdDnodes.start(1) - tdLog.sleep(10) + tdLog.info("================= step4") + tdLog.info("import 8 data after with partly overlap") + startTime = self.startTime + 3 + for rid in range(1, 9): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(11) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTRestart.py b/tests/pytest/import_merge/importTRestart.py index a80a3df0c7..9308518d8c 100644 --- a/tests/pytest/import_merge/importTRestart.py +++ b/tests/pytest/import_merge/importTRestart.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,55 +18,59 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("================= step4") - tdLog.info("import 1 data after") - startTime = self.startTime + 11 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) - tdLog.info("================= step5") - tdDnodes.forcestop(1) - tdDnodes.start(1) - tdLog.sleep(10) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime = self.startTime + 11 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step6") - tdSql.query('select * from tb1') - tdSql.checkRows(11) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTail.py b/tests/pytest/import_merge/importTail.py index 4cfa248ecb..a80db730a0 100644 --- a/tests/pytest/import_merge/importTail.py +++ b/tests/pytest/import_merge/importTail.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. 
- # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,51 +18,53 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) - startTime += 1 - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdLog.info("import 1 data after") - startTime += 1 - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime += 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(11) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTailOverlap.py b/tests/pytest/import_merge/importTailOverlap.py index 6cbf9d2e14..98596d2f77 100644 --- a/tests/pytest/import_merge/importTailOverlap.py +++ 
b/tests/pytest/import_merge/importTailOverlap.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,53 +18,55 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 10 sequential data") - startTime = self.startTime - for rid in range(1,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) - startTime += 1 - - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(10) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step4") - tdLog.info("import 6 data after with overlap") - startTime = self.startTime + 8 - for rid in range(1,7): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime, rid)) - startTime += 1 + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 6 data after with overlap") + startTime = self.startTime + 8 + for rid in range(1, 7): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(14) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(14) - def stop(self): - tdSql.close() - tdLog.success("%s 
successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTailPartOverlap.py b/tests/pytest/import_merge/importTailPartOverlap.py index 473b0314d8..0263114a25 100644 --- a/tests/pytest/import_merge/importTailPartOverlap.py +++ b/tests/pytest/import_merge/importTailPartOverlap.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,53 +18,61 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db') - tdSql.execute('use db') + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') - tdLog.info("================= step2") - tdLog.info("import 8 sequential data with gap") - startTime = self.startTime - for rid in range(1,6): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) - for rid in range(8,11): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') - tdLog.info("================= step3") - tdSql.query('select * from tb1') - tdSql.checkRows(8) + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1, 6): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + for rid in range(8, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) - tdLog.info("================= step4") - tdLog.info("import 8 data after with partly overlap") - startTime = self.startTime + 3 - for rid in range(1,9): - tdSql.execute('import into tb1 values(%ld, %d)' %(startTime+rid, rid)) + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + 
tdLog.info("import 8 data after with partly overlap") + startTime = self.startTime + 3 + for rid in range(1, 9): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step5") - tdSql.query('select * from tb1') - tdSql.checkRows(11) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importToCommit.py b/tests/pytest/import_merge/importToCommit.py index 9e14e3cd17..7a408bcdce 100644 --- a/tests/pytest/import_merge/importToCommit.py +++ b/tests/pytest/import_merge/importToCommit.py @@ -1,15 +1,15 @@ ################################################################### - # Copyright (c) 2016 by TAOS Technologies, Inc. - # All rights reserved. - # - # This file is proprietary and confidential to TAOS Technologies. - # No part of this file may be reproduced, stored, transmitted, - # disclosed or used in any form or by any means other than as - # expressly provided by the written permission from Jianhui Tao - # +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# ################################################################### -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import taos @@ -18,64 +18,68 @@ from util.cases import * from util.sql import * from util.dnodes import * + class TDTestCase: - def init(self, conn): - tdLog.debug("start to execute %s" % __file__) - tdSql.init(conn.cursor()) - - def run(self): - self.ntables = 1 - self.startTime = 1520000010000 - - tdDnodes.stop(1) - tdDnodes.deploy(1) - tdDnodes.start(1) - tdSql.execute('reset query cache') - tdSql.execute('drop database if exists db') - tdSql.execute('create database db cache 512 tables 10') - tdSql.execute('use db') + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) - tdLog.info("================= step1") - tdLog.info("create 1 table") - tdSql.execute('create table tb1 (ts timestamp, i int)') - tdLog.info("one block can import 38 records and totally there are 40 blocks") + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 - tdLog.info("================= step2") - tdLog.info('insert data until the first commit') - dnodesDir = tdDnodes.getDnodesRootDir() - dataDir = dnodesDir + '/dnode1/data/data' - startTime = self.startTime - rid0 = 1 - while (True): - sqlcmd = 'insert into tb1 values(%ld, %d)' %(startTime+rid0*2, rid0) - tdSql.execute(sqlcmd) - rid0 += 1 - vnodes = os.listdir(dataDir) - if (len(vnodes) > 0): - tdLog.info("data is committed, stop inserting") - break + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512 tables 10') + tdSql.execute('use db') - tdLog.info("================= step5") - tdLog.info("import 1 data before ") - startTime = self.startTime - sqlcmd = ['import into tb1 values'] - for rid in range(3,4): - 
sqlcmd.append('(%ld, %d)' %(startTime+rid, rid)) - tdSql.execute(" ".join(sqlcmd)) + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + tdLog.info( + "one block can import 38 records and totally there are 40 blocks") - tdLog.info("================= step6") - tdSql.execute('reset query cache') - tdSql.query('select * from tb1 order by ts desc') - tdSql.checkRows(rid0-1+1) + tdLog.info("================= step2") + tdLog.info('insert data until the first commit') + dnodesDir = tdDnodes.getDnodesRootDir() + dataDir = dnodesDir + '/dnode1/data/data' + startTime = self.startTime + rid0 = 1 + while (True): + sqlcmd = 'insert into tb1 values(%ld, %d)' % ( + startTime + rid0 * 2, rid0) + tdSql.execute(sqlcmd) + rid0 += 1 + vnodes = os.listdir(dataDir) + if (len(vnodes) > 0): + tdLog.info("data is committed, stop inserting") + break + + tdLog.info("================= step5") + tdLog.info("import 1 data before ") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(3, 4): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(rid0 - 1 + 1) + + tdLog.info("================= step7") + tdSql.execute('reset query cache') + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, rid0 - 1 + 1) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) - tdLog.info("================= step7") - tdSql.execute('reset query cache') - tdSql.query('select count(*) from tb1') - tdSql.checkData(0, 0, rid0-1+1) - def stop(self): - tdSql.close() - tdLog.success("%s successfully executed" % __file__) - tdCases.addWindows(__file__, TDTestCase()) tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/query/tbname.py b/tests/pytest/query/tbname.py index d2799efa25..5ea89fff82 100644 --- a/tests/pytest/query/tbname.py +++ b/tests/pytest/query/tbname.py @@ -26,12 +26,14 @@ class TDTestCase: def run(self): tdSql.prepare() - tdSql.execute('create table cars (ts timestamp, speed int) tags(id int)') + tdSql.execute( + 'create table cars (ts timestamp, speed int) tags(id int)') tdSql.execute("create table carzero using cars tags(0)") tdSql.execute("create table carone using cars tags(1)") tdSql.execute("create table cartwo using cars tags(2)") - tdSql.execute("insert into carzero values(now, 100) carone values(now, 110)") + tdSql.execute( + "insert into carzero values(now, 100) carone values(now, 110)") tdSql.query("select * from cars where tbname in ('carzero', 'carone')") tdSql.checkRows(2) @@ -39,13 +41,16 @@ class TDTestCase: tdSql.query("select * from cars where tbname in ('carzero', 'cartwo')") tdSql.checkRows(1) - tdSql.query("select * from cars where id=1 or tbname in ('carzero', 'cartwo')") + tdSql.query( + "select * from cars where id=1 or tbname in ('carzero', 'cartwo')") tdSql.checkRows(2) - tdSql.query("select * from cars where id=1 and tbname in ('carzero', 'cartwo')") + tdSql.query( + "select * from cars where id=1 and tbname in ('carzero', 'cartwo')") tdSql.checkRows(0) - tdSql.query("select * from cars where id=0 and tbname in ('carzero', 'cartwo')") + tdSql.query( + "select * from cars where id=0 and tbname in ('carzero', 'cartwo')") tdSql.checkRows(1) """ diff --git a/tests/pytest/util/sql.py b/tests/pytest/util/sql.py index edfee4ddb1..15567ec3ca 100644 --- 
a/tests/pytest/util/sql.py +++ b/tests/pytest/util/sql.py @@ -92,10 +92,10 @@ class TDSql: if data is None: tdLog.info("sql:%.40s, row:%d col:%d data:%s == expect:%s" % - (self.sql, row, col, self.queryResult[row][col], data)) + (self.sql, row, col, self.queryResult[row][col], data)) else: tdLog.info("sql:%.40s, row:%d col:%d data:%s == expect:%d" % - (self.sql, row, col, self.queryResult[row][col], data)) + (self.sql, row, col, self.queryResult[row][col], data)) def getData(self, row, col): if row < 0: From 050dd145e130be7ce8a80db7bbc4caca4359e4c3 Mon Sep 17 00:00:00 2001 From: slguan Date: Mon, 27 Apr 2020 15:42:27 +0800 Subject: [PATCH 06/15] [TD-184] handle bugs during wal synchronization --- src/vnode/src/vnodeMain.c | 2 +- src/vnode/src/vnodeWrite.c | 12 ++++++------ src/wal/src/walMain.c | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/vnode/src/vnodeMain.c b/src/vnode/src/vnodeMain.c index cac1030a93..9ae0588771 100644 --- a/src/vnode/src/vnodeMain.c +++ b/src/vnode/src/vnodeMain.c @@ -197,7 +197,7 @@ int32_t vnodeOpen(int32_t vnode, char *rootDir) { syncInfo.vgId = pVnode->vgId; syncInfo.version = pVnode->version; syncInfo.syncCfg = pVnode->syncCfg; - sprintf(syncInfo.path, "%s/tsdb/", rootDir); + sprintf(syncInfo.path, "%s", rootDir); syncInfo.ahandle = pVnode; syncInfo.getWalInfo = vnodeGetWalInfo; syncInfo.getFileInfo = vnodeGetFileInfo; diff --git a/src/vnode/src/vnodeWrite.c b/src/vnode/src/vnodeWrite.c index 26808f2e4e..6007379680 100644 --- a/src/vnode/src/vnodeWrite.c +++ b/src/vnode/src/vnodeWrite.c @@ -74,15 +74,15 @@ int32_t vnodeProcessWrite(void *param1, int qtype, void *param2, void *item) { // write into WAL code = walWrite(pVnode->wal, pHead); - if ( code < 0) return code; - + if (code < 0) return code; + + int32_t syncCode = syncForwardToPeer(pVnode->sync, pHead, item); + if (syncCode < 0) return syncCode; + code = (*vnodeProcessWriteMsgFp[pHead->msgType])(pVnode, pHead->cont, item); if (code < 0) return code; - if (pVnode->syncCfg.replica > 1) - code = syncForwardToPeer(pVnode->sync, pHead, item); - - return code; + return syncCode; } static int32_t vnodeProcessSubmitMsg(SVnodeObj *pVnode, void *pCont, SRspRet *pRet) { diff --git a/src/wal/src/walMain.c b/src/wal/src/walMain.c index 6a4ecee1c3..ad33391b98 100644 --- a/src/wal/src/walMain.c +++ b/src/wal/src/walMain.c @@ -269,7 +269,7 @@ int walGetWalFile(void *handle, char *name, uint32_t *index) { if (*index < first && *index > pWal->id) { code = -1; // index out of range } else { - sprintf(name, "%s/%s%d", pWal->path, walPrefix, *index); + sprintf(name, "wal/%s%d", walPrefix, *index); code = (*index == pWal->id) ? 0:1; } From 736ddfbbff3153a7d91eeadb98d902f7df05f11c Mon Sep 17 00:00:00 2001 From: localvar Date: Sun, 19 Apr 2020 18:52:24 +0800 Subject: [PATCH 07/15] TD-153: add exception handling --- src/util/inc/exception.h | 52 ++++++ src/util/inc/tbuffer.h | 206 ++++++++++++++--------- src/util/src/exception.c | 20 +++ src/util/src/tbuffer.c | 343 +++++++++++++++++++++++++++++++++++---- 4 files changed, 507 insertions(+), 114 deletions(-) create mode 100644 src/util/inc/exception.h create mode 100644 src/util/src/exception.c diff --git a/src/util/inc/exception.h b/src/util/inc/exception.h new file mode 100644 index 0000000000..5c9506f802 --- /dev/null +++ b/src/util/inc/exception.h @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. 
+ * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +#ifndef TDENGINE_EXCEPTION_H +#define TDENGINE_EXCEPTION_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct SExceptionNode { + struct SExceptionNode* prev; + jmp_buf jb; + int code; +} SExceptionNode; + +void expPushNode( SExceptionNode* node ); +int expPopNode(); +void expThrow( int code ); + +#define TRY do { \ + SExceptionNode expNode = { 0 }; \ + expPushNode( &expNode ); \ + if( setjmp(expNode.jb) == 0 ) { + +#define CATCH( code ) expPopNode(); \ + } else { \ + int code = expPopNode(); + +#define END_CATCH } } while( 0 ); + +#define THROW( x ) expThrow( (x) ) + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/util/inc/tbuffer.h b/src/util/inc/tbuffer.h index 9bc0fd9f43..2d8ea732cc 100644 --- a/src/util/inc/tbuffer.h +++ b/src/util/inc/tbuffer.h @@ -16,122 +16,170 @@ #ifndef TDENGINE_TBUFFER_H #define TDENGINE_TBUFFER_H -#include "setjmp.h" -#include "os.h" +#include +#include #ifdef __cplusplus extern "C" { #endif /* -SBuffer can be used to read or write a buffer, but cannot be used for both -read & write at a same time. Below is an example: +// SBuffer can be used to read or write a buffer, but cannot be used for both +// read & write at a same time. Below is an example: +#include +#include +#include "exception.h" +#include "tbuffer.h" -int main(int argc, char** argv) { - //--------------------- write ------------------------ - SBuffer wbuf; - int32_t code = tbufBeginWrite(&wbuf); - if (code != 0) { - // handle errors - return 0; - } +int foo() { + SBuffer wbuf, rbuf; + tbufSetup(&wbuf, NULL, false); + tbufSetup(&rbuf, NULL, false); - // reserve 1024 bytes for the buffer to improve performance - tbufEnsureCapacity(&wbuf, 1024); + TRY { + //--------------------- write ------------------------ + tbufBeginWrite(&wbuf); + // reserve 1024 bytes for the buffer to improve performance + tbufEnsureCapacity(&wbuf, 1024); + // write 5 integers to the buffer + for (int i = 0; i < 5; i++) { + tbufWriteInt32(&wbuf, i); + } + // write a string to the buffer + tbufWriteString(&wbuf, "this is a string.\n"); + // acquire the result and close the write buffer + size_t size = tbufTell(&wbuf); + char* data = tbufGetData(&wbuf, true); - // write 5 integers to the buffer - for (int i = 0; i < 5; i++) { - tbufWriteInt32(&wbuf, i); - } + //------------------------ read ----------------------- + tbufBeginRead(&rbuf, data, size); + // read & print out 5 integers + for (int i = 0; i < 5; i++) { + printf("%d\n", tbufReadInt32(&rbuf)); + } + // read & print out a string + puts(tbufReadString(&rbuf, NULL)); + // try read another integer, this result in an error as there no this integer + tbufReadInt32(&rbuf); + printf("you should not see this message.\n"); + } CATCH( code ) { + printf("exception code is: %d, you will see this message after print out 5 integers and a string.\n", code); + THROW( code ); + } END_CATCH - // write a string to the buffer - tbufWriteString(&wbuf, "this is a string.\n"); - - // acquire the result and close the 
write buffer - size_t size = tbufTell(&wbuf); - char* data = tbufGetData(&wbuf, true); tbufClose(&wbuf, true); - - - //------------------------ read ----------------------- - SBuffer rbuf; - code = tbufBeginRead(&rbuf, data, size); - if (code != 0) { - printf("you will see this message after print out 5 integers and a string.\n"); - tbufClose(&rbuf, false); - return 0; - } - - // read & print out 5 integers - for (int i = 0; i < 5; i++) { - printf("%d\n", tbufReadInt32(&rbuf)); - } - - // read & print out a string - printf(tbufReadString(&rbuf, NULL)); - - // try read another integer, this result in an error as there no this integer - tbufReadInt32(&rbuf); - - printf("you should not see this message.\n"); tbufClose(&rbuf, false); - return 0; } + +int main(int argc, char** argv) { + TRY { + printf("in main: you will see this line\n"); + foo(); + printf("in main: you will not see this line\n"); + } CATCH( code ) { + printf("foo raise an exception with code %d\n", code); + } END_CATCH + + return 0; +} */ + typedef struct { - jmp_buf jb; + void* (*allocator)(void*, size_t); + bool endian; char* data; size_t pos; size_t size; } SBuffer; // common functions can be used in both read & write -#define tbufThrowError(buf, code) longjmp((buf)->jb, (code)) + +// tbufSetup setup the buffer, should be called before tbufBeginRead / tbufBeginWrite +// *allocator*, function to allocate memory, will use 'realloc' if NULL +// *endian*, if true, read/write functions of primitive types will do 'ntoh' or 'hton' automatically +void tbufSetup(SBuffer* buf, void* (*allocator)(void*, size_t), bool endian); size_t tbufTell(SBuffer* buf); size_t tbufSeekTo(SBuffer* buf, size_t pos); -size_t tbufSkip(SBuffer* buf, size_t size); void tbufClose(SBuffer* buf, bool keepData); // basic read functions -#define tbufBeginRead(buf, _data, len) ((buf)->data = (char*)(_data), ((buf)->pos = 0), ((buf)->size = ((_data) == NULL) ? 
0 : (len)), setjmp((buf)->jb)) +void tbufBeginRead(SBuffer* buf, void* data, size_t len); +size_t tbufSkip(SBuffer* buf, size_t size); char* tbufRead(SBuffer* buf, size_t size); void tbufReadToBuffer(SBuffer* buf, void* dst, size_t size); const char* tbufReadString(SBuffer* buf, size_t* len); size_t tbufReadToString(SBuffer* buf, char* dst, size_t size); +const char* tbufReadBinary(SBuffer* buf, size_t *len); +size_t tbufReadToBinary(SBuffer* buf, void* dst, size_t size); // basic write functions -#define tbufBeginWrite(buf) ((buf)->data = NULL, ((buf)->pos = 0), ((buf)->size = 0), setjmp((buf)->jb)) -void tbufEnsureCapacity(SBuffer* buf, size_t size); -char* tbufGetData(SBuffer* buf, bool takeOver); -void tbufWrite(SBuffer* buf, const void* data, size_t size); -void tbufWriteAt(SBuffer* buf, size_t pos, const void* data, size_t size); -void tbufWriteStringLen(SBuffer* buf, const char* str, size_t len); -void tbufWriteString(SBuffer* buf, const char* str); +void tbufBeginWrite(SBuffer* buf); +void tbufEnsureCapacity(SBuffer* buf, size_t size); +size_t tbufReserve(SBuffer* buf, size_t size); +char* tbufGetData(SBuffer* buf, bool takeOver); +void tbufWrite(SBuffer* buf, const void* data, size_t size); +void tbufWriteAt(SBuffer* buf, size_t pos, const void* data, size_t size); +void tbufWriteStringLen(SBuffer* buf, const char* str, size_t len); +void tbufWriteString(SBuffer* buf, const char* str); +// the prototype of WriteBinary and Write is identical +// the difference is: WriteBinary writes the length of the data to the buffer +// first, then the actual data, which means the reader don't need to know data +// size before read. Write only write the data itself, which means the reader +// need to know data size before read. +void tbufWriteBinary(SBuffer* buf, const void* data, size_t len); -// read & write function for primitive types -#ifndef TBUFFER_DEFINE_FUNCTION -#define TBUFFER_DEFINE_FUNCTION(type, name) \ - type tbufRead##name(SBuffer* buf); \ - void tbufWrite##name(SBuffer* buf, type data); \ - void tbufWrite##name##At(SBuffer* buf, size_t pos, type data); -#endif +// read / write functions for primitive types +bool tbufReadBool(SBuffer* buf); +void tbufWriteBool(SBuffer* buf, bool data); +void tbufWriteBoolAt(SBuffer* buf, size_t pos, bool data); -TBUFFER_DEFINE_FUNCTION(bool, Bool) -TBUFFER_DEFINE_FUNCTION(char, Char) -TBUFFER_DEFINE_FUNCTION(int8_t, Int8) -TBUFFER_DEFINE_FUNCTION(uint8_t, Uint8) -TBUFFER_DEFINE_FUNCTION(int16_t, Int16) -TBUFFER_DEFINE_FUNCTION(uint16_t, Uint16) -TBUFFER_DEFINE_FUNCTION(int32_t, Int32) -TBUFFER_DEFINE_FUNCTION(uint32_t, Uint32) -TBUFFER_DEFINE_FUNCTION(int64_t, Int64) -TBUFFER_DEFINE_FUNCTION(uint64_t, Uint64) -TBUFFER_DEFINE_FUNCTION(float, Float) -TBUFFER_DEFINE_FUNCTION(double, Double) +char tbufReadChar(SBuffer* buf); +void tbufWriteChar(SBuffer* buf, char data); +void tbufWriteCharAt(SBuffer* buf, size_t pos, char data); + +int8_t tbufReadInt8(SBuffer* buf); +void tbufWriteInt8(SBuffer* buf, int8_t data); +void tbufWriteInt8At(SBuffer* buf, size_t pos, int8_t data); + +uint8_t tbufReadUint8(SBuffer* buf); +void tbufWriteUint8(SBuffer* buf, uint8_t data); +void tbufWriteUint8At(SBuffer* buf, size_t pos, uint8_t data); + +int16_t tbufReadInt16(SBuffer* buf); +void tbufWriteInt16(SBuffer* buf, int16_t data); +void tbufWriteInt16At(SBuffer* buf, size_t pos, int16_t data); + +uint16_t tbufReadUint16(SBuffer* buf); +void tbufWriteUint16(SBuffer* buf, uint16_t data); +void tbufWriteUint16At(SBuffer* buf, size_t pos, uint16_t data); + +int32_t 
tbufReadInt32(SBuffer* buf); +void tbufWriteInt32(SBuffer* buf, int32_t data); +void tbufWriteInt32At(SBuffer* buf, size_t pos, int32_t data); + +uint32_t tbufReadUint32(SBuffer* buf); +void tbufWriteUint32(SBuffer* buf, uint32_t data); +void tbufWriteUint32At(SBuffer* buf, size_t pos, uint32_t data); + +int64_t tbufReadInt64(SBuffer* buf); +void tbufWriteInt64(SBuffer* buf, int64_t data); +void tbufWriteInt64At(SBuffer* buf, size_t pos, int64_t data); + +uint64_t tbufReadUint64(SBuffer* buf); +void tbufWriteUint64(SBuffer* buf, uint64_t data); +void tbufWriteUint64At(SBuffer* buf, size_t pos, uint64_t data); + +float tbufReadFloat(SBuffer* buf); +void tbufWriteFloat(SBuffer* buf, float data); +void tbufWriteFloatAt(SBuffer* buf, size_t pos, float data); + +double tbufReadDouble(SBuffer* buf); +void tbufWriteDouble(SBuffer* buf, double data); +void tbufWriteDoubleAt(SBuffer* buf, size_t pos, double data); #ifdef __cplusplus } #endif -#endif \ No newline at end of file +#endif diff --git a/src/util/src/exception.c b/src/util/src/exception.c new file mode 100644 index 0000000000..6363aaebf6 --- /dev/null +++ b/src/util/src/exception.c @@ -0,0 +1,20 @@ +#include "exception.h" + + +static _Thread_local SExceptionNode* expList; + +void expPushNode( SExceptionNode* node ) { + node->prev = expList; + expList = node; +} + +int expPopNode() { + SExceptionNode* node = expList; + expList = node->prev; + return node->code; +} + +void expThrow( int code ) { + expList->code = code; + longjmp( expList->jb, 1 ); +} diff --git a/src/util/src/tbuffer.c b/src/util/src/tbuffer.c index a83d7dddb0..c254436a4e 100644 --- a/src/util/src/tbuffer.c +++ b/src/util/src/tbuffer.c @@ -16,47 +16,44 @@ #include #include #include - -#define TBUFFER_DEFINE_FUNCTION(type, name) \ - type tbufRead##name(SBuffer* buf) { \ - type ret; \ - tbufReadToBuffer(buf, &ret, sizeof(type)); \ - return ret; \ - }\ - void tbufWrite##name(SBuffer* buf, type data) {\ - tbufWrite(buf, &data, sizeof(data));\ - }\ - void tbufWrite##name##At(SBuffer* buf, size_t pos, type data) {\ - tbufWriteAt(buf, pos, &data, sizeof(data));\ - } - +#include #include "tbuffer.h" - +#include "exception.h" +#include //////////////////////////////////////////////////////////////////////////////// // common functions +void tbufSetup( + SBuffer* buf, + void* (*allocator)(void*, size_t), + bool endian +) { + if (allocator != NULL) { + buf->allocator = allocator; + } else { + buf->allocator = realloc; + } + + buf->endian = endian; +} + size_t tbufTell(SBuffer* buf) { return buf->pos; } size_t tbufSeekTo(SBuffer* buf, size_t pos) { if (pos > buf->size) { - // TODO: update error code, other tbufThrowError need to be changed too - tbufThrowError(buf, 1); + THROW( TSDB_CODE_MEMORY_CORRUPTED ); } size_t old = buf->pos; buf->pos = pos; return old; } -size_t tbufSkip(SBuffer* buf, size_t size) { - return tbufSeekTo(buf, buf->pos + size); -} - void tbufClose(SBuffer* buf, bool keepData) { if (!keepData) { - free(buf->data); + (*buf->allocator)(buf->data, 0); } buf->data = NULL; buf->pos = 0; @@ -66,6 +63,16 @@ void tbufClose(SBuffer* buf, bool keepData) { //////////////////////////////////////////////////////////////////////////////// // read functions +void tbufBeginRead(SBuffer* buf, void* data, size_t len) { + buf->data = data; + buf->pos = 0; + buf->size = (data == NULL) ? 
0 : len; +} + +size_t tbufSkip(SBuffer* buf, size_t size) { + return tbufSeekTo(buf, buf->pos + size); +} + char* tbufRead(SBuffer* buf, size_t size) { char* ret = buf->data + buf->pos; tbufSkip(buf, size); @@ -78,8 +85,16 @@ void tbufReadToBuffer(SBuffer* buf, void* dst, size_t size) { memcpy(dst, tbufRead(buf, size), size); } -const char* tbufReadString(SBuffer* buf, size_t* len) { +static size_t tbufReadLength(SBuffer* buf) { + // maximum length is 65535, if larger length is required + // this function and the corresponding write function need to be + // revised. uint16_t l = tbufReadUint16(buf); + return l; +} + +const char* tbufReadString(SBuffer* buf, size_t* len) { + size_t l = tbufReadLength(buf); char* ret = buf->data + buf->pos; tbufSkip(buf, l + 1); ret[l] = 0; // ensure the string end with '\0' @@ -101,23 +116,55 @@ size_t tbufReadToString(SBuffer* buf, char* dst, size_t size) { return len; } +const char* tbufReadBinary(SBuffer* buf, size_t *len) { + size_t l = tbufReadLength(buf); + char* ret = buf->data + buf->pos; + tbufSkip(buf, l); + if (len != NULL) { + *len = l; + } + return ret; +} + +size_t tbufReadToBinary(SBuffer* buf, void* dst, size_t size) { + assert(dst != NULL); + size_t len; + const char* data = tbufReadBinary(buf, &len); + if (len >= size) { + len = size; + } + memcpy(dst, data, len); + return len; +} //////////////////////////////////////////////////////////////////////////////// // write functions +void tbufBeginWrite(SBuffer* buf) { + buf->data = NULL; + buf->pos = 0; + buf->size = 0; +} + void tbufEnsureCapacity(SBuffer* buf, size_t size) { size += buf->pos; if (size > buf->size) { size_t nsize = size + buf->size; - char* data = realloc(buf->data, nsize); + char* data = (*buf->allocator)(buf->data, nsize); if (data == NULL) { - tbufThrowError(buf, 2); + // TODO: handle client out of memory + THROW( TSDB_CODE_SERV_OUT_OF_MEMORY ); } buf->data = data; buf->size = nsize; } } +size_t tbufReserve(SBuffer* buf, size_t size) { + tbufEnsureCapacity(buf, size); + return tbufSeekTo(buf, buf->pos + size); +} + char* tbufGetData(SBuffer* buf, bool takeOver) { char* ret = buf->data; if (takeOver) { @@ -129,13 +176,6 @@ char* tbufGetData(SBuffer* buf, bool takeOver) { return ret; } -void tbufEndWrite(SBuffer* buf) { - free(buf->data); - buf->data = NULL; - buf->pos = 0; - buf->size = 0; -} - void tbufWrite(SBuffer* buf, const void* data, size_t size) { assert(data != NULL); tbufEnsureCapacity(buf, size); @@ -151,15 +191,248 @@ void tbufWriteAt(SBuffer* buf, size_t pos, const void* data, size_t size) { memcpy(buf->data + pos, data, size); } -void tbufWriteStringLen(SBuffer* buf, const char* str, size_t len) { - // maximum string length is 65535, if longer string is required +static void tbufWriteLength(SBuffer* buf, size_t len) { + // maximum length is 65535, if larger length is required // this function and the corresponding read function need to be // revised. 
assert(len <= 0xffff); tbufWriteUint16(buf, (uint16_t)len); - tbufWrite(buf, str, len + 1); +} + +void tbufWriteStringLen(SBuffer* buf, const char* str, size_t len) { + tbufWriteLength(buf, len); + tbufWrite(buf, str, len); + tbufWriteChar(buf, '\0'); } void tbufWriteString(SBuffer* buf, const char* str) { tbufWriteStringLen(buf, str, strlen(str)); } + +void tbufWriteBinary(SBuffer* buf, const void* data, size_t len) { + tbufWriteLength(buf, len); + tbufWrite(buf, data, len); +} + +//////////////////////////////////////////////////////////////////////////////// +// read / write functions for primitive types + +bool tbufReadBool(SBuffer* buf) { + bool ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + return ret; +} + +void tbufWriteBool(SBuffer* buf, bool data) { + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteBoolAt(SBuffer* buf, size_t pos, bool data) { + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +char tbufReadChar(SBuffer* buf) { + char ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + return ret; +} + +void tbufWriteChar(SBuffer* buf, char data) { + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteCharAt(SBuffer* buf, size_t pos, char data) { + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +int8_t tbufReadInt8(SBuffer* buf) { + int8_t ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + return ret; +} + +void tbufWriteInt8(SBuffer* buf, int8_t data) { + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteInt8At(SBuffer* buf, size_t pos, int8_t data) { + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +uint8_t tbufReadUint8(SBuffer* buf) { + uint8_t ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + return ret; +} + +void tbufWriteUint8(SBuffer* buf, uint8_t data) { + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteUint8At(SBuffer* buf, size_t pos, uint8_t data) { + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +int16_t tbufReadInt16(SBuffer* buf) { + int16_t ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + if (buf->endian) { + return (int16_t)ntohs(ret); + } + return ret; +} + +void tbufWriteInt16(SBuffer* buf, int16_t data) { + if (buf->endian) { + data = (int16_t)htons(data); + } + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteInt16At(SBuffer* buf, size_t pos, int16_t data) { + if (buf->endian) { + data = (int16_t)htons(data); + } + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +uint16_t tbufReadUint16(SBuffer* buf) { + uint16_t ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + if (buf->endian) { + return ntohs(ret); + } + return ret; +} + +void tbufWriteUint16(SBuffer* buf, uint16_t data) { + if (buf->endian) { + data = htons(data); + } + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteUint16At(SBuffer* buf, size_t pos, uint16_t data) { + if (buf->endian) { + data = htons(data); + } + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +int32_t tbufReadInt32(SBuffer* buf) { + int32_t ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + if (buf->endian) { + return (int32_t)ntohl(ret); + } + return ret; +} + +void tbufWriteInt32(SBuffer* buf, int32_t data) { + if (buf->endian) { + data = (int32_t)htonl(data); + } + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteInt32At(SBuffer* buf, size_t pos, int32_t data) { + if (buf->endian) { + data = (int32_t)htonl(data); + } + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +uint32_t tbufReadUint32(SBuffer* buf) { + uint32_t ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + if (buf->endian) { + return ntohl(ret); + } + return ret; +} + +void tbufWriteUint32(SBuffer* 
buf, uint32_t data) { + if (buf->endian) { + data = htonl(data); + } + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteUint32At(SBuffer* buf, size_t pos, uint32_t data) { + if (buf->endian) { + data = htonl(data); + } + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +int64_t tbufReadInt64(SBuffer* buf) { + int64_t ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + if (buf->endian) { + return (int64_t)htobe64(ret); // TODO: ntohll + } + return ret; +} + +void tbufWriteInt64(SBuffer* buf, int64_t data) { + if (buf->endian) { + data = (int64_t)htobe64(data); + } + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteInt64At(SBuffer* buf, size_t pos, int64_t data) { + if (buf->endian) { + data = (int64_t)htobe64(data); + } + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +uint64_t tbufReadUint64(SBuffer* buf) { + uint64_t ret; + tbufReadToBuffer(buf, &ret, sizeof(ret)); + if (buf->endian) { + return htobe64(ret); // TODO: ntohll + } + return ret; +} + +void tbufWriteUint64(SBuffer* buf, uint64_t data) { + if (buf->endian) { + data = htobe64(data); + } + tbufWrite(buf, &data, sizeof(data)); +} + +void tbufWriteUint64At(SBuffer* buf, size_t pos, uint64_t data) { + if (buf->endian) { + data = htobe64(data); + } + tbufWriteAt(buf, pos, &data, sizeof(data)); +} + +float tbufReadFloat(SBuffer* buf) { + uint32_t ret = tbufReadUint32(buf); + return *(float*)(&ret); +} + +void tbufWriteFloat(SBuffer* buf, float data) { + tbufWriteUint32(buf, *(uint32_t*)(&data)); +} + +void tbufWriteFloatAt(SBuffer* buf, size_t pos, float data) { + tbufWriteUint32At(buf, pos, *(uint32_t*)(&data)); +} + +double tbufReadDouble(SBuffer* buf) { + uint64_t ret = tbufReadUint64(buf); + return *(double*)(&ret); +} + +void tbufWriteDouble(SBuffer* buf, double data) { + tbufWriteUint64(buf, *(uint64_t*)(&data)); +} + +void tbufWriteDoubleAt(SBuffer* buf, size_t pos, double data) { + tbufWriteUint64At(buf, pos, *(uint64_t*)(&data)); +} From 4826a1851c56471a5aa19f23ba7e9cf7bb1a2ebc Mon Sep 17 00:00:00 2001 From: localvar Date: Fri, 24 Apr 2020 15:37:34 +0800 Subject: [PATCH 08/15] TD-153: add defer support --- src/util/inc/exception.h | 70 +++++++++++++++++++++++++++++++++------- src/util/src/exception.c | 29 +++++++++++++++-- 2 files changed, 85 insertions(+), 14 deletions(-) diff --git a/src/util/inc/exception.h b/src/util/inc/exception.h index 5c9506f802..19c37561d2 100644 --- a/src/util/inc/exception.h +++ b/src/util/inc/exception.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019 TAOS Data, Inc. + * Copyright (c) 2020 TAOS Data, Inc. 
* * This program is free software: you can use, redistribute, and/or modify * it under the terms of the GNU Affero General Public License, version 3 @@ -17,33 +17,81 @@ #define TDENGINE_EXCEPTION_H #include +#include +#include #ifdef __cplusplus extern "C" { #endif +/* + * exception handling + */ typedef struct SExceptionNode { struct SExceptionNode* prev; jmp_buf jb; int code; } SExceptionNode; -void expPushNode( SExceptionNode* node ); -int expPopNode(); -void expThrow( int code ); +void exceptionPushNode( SExceptionNode* node ); +int exceptionPopNode(); +void exceptionThrow( int code ); + +#define THROW( x ) exceptionThrow( (x) ) +#define CAUGHT_EXCEPTION() (caught_exception == 1) #define TRY do { \ SExceptionNode expNode = { 0 }; \ - expPushNode( &expNode ); \ - if( setjmp(expNode.jb) == 0 ) { + exceptionPushNode( &expNode ); \ + int caught_exception = setjmp(expNode.jb); \ + if( caught_exception == 0 ) -#define CATCH( code ) expPopNode(); \ - } else { \ - int code = expPopNode(); +#define CATCH( code ) int code = exceptionPopNode(); \ + if( caught_exception == 1 ) -#define END_CATCH } } while( 0 ); +#define FINALLY( code ) int code = exceptionPopNode(); -#define THROW( x ) expThrow( (x) ) +#define END_TRY } while( 0 ); + + +/* + * defered operations + */ +typedef struct SDeferedOperation { + void (*wrapper)( struct SDeferedOperation* dp ); + void* func; + void* arg; +} SDeferedOperation; + +void deferExecute( SDeferedOperation* operations, unsigned int numOfOperations ); +void deferWrapper_void_void( SDeferedOperation* dp ); +void deferWrapper_void_ptr( SDeferedOperation* dp ); +void deferWrapper_int_int( SDeferedOperation* dp ); + +#define DEFER_INIT( MaxOperations ) unsigned int maxDeferedOperations = MaxOperations, numOfDeferedOperations = 0; \ + SDeferedOperation deferedOperations[MaxOperations] + +#define DEFER_PUSH( wrapperFunc, deferedFunc, argument ) do { \ + assert( numOfDeferedOperations < maxDeferedOperations ); \ + SDeferedOperation* dp = deferedOperations + numOfDeferedOperations++; \ + dp->wrapper = wrapperFunc; \ + dp->func = (void*)deferedFunc; \ + dp->arg = (void*)argument; \ +} while( 0 ) + +#define DEFER_POP() do { --numOfDeferedOperations; } while( 0 ) + +#define DEFER_EXECUTE() do{ \ + deferExecute( deferedOperations, numOfDeferedOperations ); \ + numOfDeferedOperations = 0; \ +} while( 0 ) + +#define DEFER_PUSH_VOID_PTR( func, arg ) DEFER_PUSH( deferWrapper_void_ptr, func, arg ) +#define DEFER_PUSH_INT_INT( func, arg ) DEFER_PUSH( deferWrapper_int_int, func, arg ) +#define DEFER_PUSH_VOID_VOID( func ) DEFER_PUSH( deferWrapper_void_void, func, 0 ) + +#define DEFER_PUSH_FREE( arg ) DEFER_PUSH( deferWrapper_void_ptr, free, arg ) +#define DEFER_PUSH_CLOSE( arg ) DEFER_PUSH( deferWrapper_int_int, close, arg ) #ifdef __cplusplus } diff --git a/src/util/src/exception.c b/src/util/src/exception.c index 6363aaebf6..45ebd349a5 100644 --- a/src/util/src/exception.c +++ b/src/util/src/exception.c @@ -3,18 +3,41 @@ static _Thread_local SExceptionNode* expList; -void expPushNode( SExceptionNode* node ) { +void exceptionPushNode( SExceptionNode* node ) { node->prev = expList; expList = node; } -int expPopNode() { +int exceptionPopNode() { SExceptionNode* node = expList; expList = node->prev; return node->code; } -void expThrow( int code ) { +void exceptionThrow( int code ) { expList->code = code; longjmp( expList->jb, 1 ); } + +void deferWrapper_void_ptr( SDeferedOperation* dp ) { + void (*func)( void* ) = dp->func; + func( dp->arg ); +} + +void deferWrapper_int_int( 
SDeferedOperation* dp ) { + int (*func)( int ) = dp->func; + func( (int)(intptr_t)(dp->arg) ); +} + +void deferWrapper_void_void( SDeferedOperation* dp ) { + void (*func)() = dp->func; + func(); +} + +void deferExecute( SDeferedOperation* operations, unsigned int numOfOperations ) { + while( numOfOperations > 0 ) { + --numOfOperations; + SDeferedOperation* dp = operations + numOfOperations; + dp->wrapper( dp ); + } +} From c24f7ea7bf1a3fc404c502682a85671fe33b66b0 Mon Sep 17 00:00:00 2001 From: localvar Date: Mon, 27 Apr 2020 08:03:32 +0800 Subject: [PATCH 09/15] update defer and rename it to cleanup --- src/util/inc/exception.h | 116 ++++++++++++++++++++++----------------- src/util/src/exception.c | 108 +++++++++++++++++++++++++++++++----- 2 files changed, 159 insertions(+), 65 deletions(-) diff --git a/src/util/inc/exception.h b/src/util/inc/exception.h index 19c37561d2..229ba89d04 100644 --- a/src/util/inc/exception.h +++ b/src/util/inc/exception.h @@ -25,73 +25,87 @@ extern "C" { #endif /* - * exception handling + * cleanup actions + */ +typedef struct SCleanupAction { + bool failOnly; + uint8_t wrapper; + uint16_t reserved; + void* func; + union { + void* Ptr; + bool Bool; + char Char; + int8_t Int8; + uint8_t Uint8; + int16_t Int16; + uint16_t Uint16; + int Int; + unsigned int Uint; + int32_t Int32; + uint32_t Uint32; + int64_t Int64; + uint64_t Uint64; + float Float; + double Double; + } arg1, arg2; +} SCleanupAction; + +void cleanupPush_void_ptr_ptr ( bool failOnly, void* func, void* arg1, void* arg2 ); +void cleanupPush_void_ptr_bool ( bool failOnly, void* func, void* arg1, bool arg2 ); +void cleanupPush_void_ptr ( bool failOnly, void* func, void* arg ); +void cleanupPush_int_int ( bool failOnly, void* func, int arg ); +void cleanupPush_void ( bool failOnly, void* func ); + +int32_t cleanupGetActionCount(); +void cleanupExecute( bool failed, int32_t toIndex ); + +#define CLEANUP_PUSH_VOID_PTR_PTR( failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_ptr( (failOnly), (void*)(func), (void*)(arg1), (void*)(arg2) ) +#define CLEANUP_PUSH_VOID_PTR_BOOL( failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_bool( (failOnly), (void*)(func), (void*)(arg1), (bool)(arg2) ) +#define CLEANUP_PUSH_VOID_PTR( failOnly, func, arg ) cleanupPush_void_ptr( (failOnly), (void*)(func), (void*)(arg) ) +#define CLEANUP_PUSH_INT_INT( failOnly, func, arg ) cleanupPush_void_ptr( (failOnly), (void*)(func), (int)(arg) ) +#define CLEANUP_PUSH_VOID( failOnly, func ) cleanupPush_void( (failOnly), (void*)(func) ) +#define CLEANUP_PUSH_FREE( failOnly, arg ) cleanupPush_void_ptr( (failOnly), free, (void*)(arg) ) +#define CLEANUP_PUSH_CLOSE( failOnly, arg ) cleanupPush_int_int( (failOnly), close, (int)(arg) ) + +#define CLEANUP_CREATE_ANCHOR() int32_t cleanupAnchor = cleanupGetActionCount() +#define CLEANUP_EXECUTE( failed ) cleanupExecute( cleanupAnchor, (failed) ) + +/* + * exception hander registration */ typedef struct SExceptionNode { struct SExceptionNode* prev; jmp_buf jb; - int code; + int32_t code; + int32_t maxCleanupAction; + int32_t numCleanupAction; + SCleanupAction* cleanupActions; } SExceptionNode; void exceptionPushNode( SExceptionNode* node ); -int exceptionPopNode(); +int32_t exceptionPopNode(); void exceptionThrow( int code ); -#define THROW( x ) exceptionThrow( (x) ) -#define CAUGHT_EXCEPTION() (caught_exception == 1) - -#define TRY do { \ - SExceptionNode expNode = { 0 }; \ - exceptionPushNode( &expNode ); \ - int caught_exception = setjmp(expNode.jb); \ - if( caught_exception == 0 ) +#define 
TRY(maxCleanupActions) do { \ + SExceptionNode exceptionNode = { 0 }; \ + SDeferedOperation cleanupActions[maxCleanupActions > 0 ? maxCleanupActions : 1]; \ + exceptionNode.maxCleanupAction = maxCleanupActions > 0 ? maxDefered : 1; \ + exceptionNode.cleanupActions = cleanupActions; \ + int32_t cleanupAnchor = 0; \ + exceptionPushNode( &exceptionNode ); \ + int caughtException = setjmp( exceptionNode.jb ); \ + if( caughtException == 0 ) #define CATCH( code ) int code = exceptionPopNode(); \ - if( caught_exception == 1 ) + if( caughtEexception == 1 ) #define FINALLY( code ) int code = exceptionPopNode(); #define END_TRY } while( 0 ); - -/* - * defered operations - */ -typedef struct SDeferedOperation { - void (*wrapper)( struct SDeferedOperation* dp ); - void* func; - void* arg; -} SDeferedOperation; - -void deferExecute( SDeferedOperation* operations, unsigned int numOfOperations ); -void deferWrapper_void_void( SDeferedOperation* dp ); -void deferWrapper_void_ptr( SDeferedOperation* dp ); -void deferWrapper_int_int( SDeferedOperation* dp ); - -#define DEFER_INIT( MaxOperations ) unsigned int maxDeferedOperations = MaxOperations, numOfDeferedOperations = 0; \ - SDeferedOperation deferedOperations[MaxOperations] - -#define DEFER_PUSH( wrapperFunc, deferedFunc, argument ) do { \ - assert( numOfDeferedOperations < maxDeferedOperations ); \ - SDeferedOperation* dp = deferedOperations + numOfDeferedOperations++; \ - dp->wrapper = wrapperFunc; \ - dp->func = (void*)deferedFunc; \ - dp->arg = (void*)argument; \ -} while( 0 ) - -#define DEFER_POP() do { --numOfDeferedOperations; } while( 0 ) - -#define DEFER_EXECUTE() do{ \ - deferExecute( deferedOperations, numOfDeferedOperations ); \ - numOfDeferedOperations = 0; \ -} while( 0 ) - -#define DEFER_PUSH_VOID_PTR( func, arg ) DEFER_PUSH( deferWrapper_void_ptr, func, arg ) -#define DEFER_PUSH_INT_INT( func, arg ) DEFER_PUSH( deferWrapper_int_int, func, arg ) -#define DEFER_PUSH_VOID_VOID( func ) DEFER_PUSH( deferWrapper_void_void, func, 0 ) - -#define DEFER_PUSH_FREE( arg ) DEFER_PUSH( deferWrapper_void_ptr, free, arg ) -#define DEFER_PUSH_CLOSE( arg ) DEFER_PUSH( deferWrapper_int_int, close, arg ) +#define THROW( x ) exceptionThrow( (x) ) +#define CAUGHT_EXCEPTION() ((bool)(caughtEexception == 1)) #ifdef __cplusplus } diff --git a/src/util/src/exception.c b/src/util/src/exception.c index 45ebd349a5..b0e8fce371 100644 --- a/src/util/src/exception.c +++ b/src/util/src/exception.c @@ -8,7 +8,7 @@ void exceptionPushNode( SExceptionNode* node ) { expList = node; } -int exceptionPopNode() { +int32_t exceptionPopNode() { SExceptionNode* node = expList; expList = node->prev; return node->code; @@ -19,25 +19,105 @@ void exceptionThrow( int code ) { longjmp( expList->jb, 1 ); } -void deferWrapper_void_ptr( SDeferedOperation* dp ) { - void (*func)( void* ) = dp->func; - func( dp->arg ); + + +static void cleanupWrapper_void_ptr_ptr( SCleanupAction* ca ) { + void (*func)( void*, void* ) = ac->func; + func( ca->arg1.Ptr, ca->arg2.Ptr ); } -void deferWrapper_int_int( SDeferedOperation* dp ) { - int (*func)( int ) = dp->func; - func( (int)(intptr_t)(dp->arg) ); +static void cleanupWrapper_void_ptr_bool( SCleanupAction* ca ) { + void (*func)( void*, bool ) = ca->func; + func( ca->arg1.Ptr, ca->arg2.Bool ); } -void deferWrapper_void_void( SDeferedOperation* dp ) { - void (*func)() = dp->func; +static void cleanupWrapper_void_ptr( SCleanupAction* ca ) { + void (*func)( void* ) = ca->func; + func( ca->arg1.Ptr ); +} + +static void cleanupWrapper_int_int( SCleanupAction* 
ca ) { + int (*func)( int ) = ca->func; + func( (int)(intptr_t)(ca->arg1.Int) ); +} + +static void cleanupWrapper_void_void( SCleanupAction* ca ) { + void (*func)() = ca->func; func(); } -void deferExecute( SDeferedOperation* operations, unsigned int numOfOperations ) { - while( numOfOperations > 0 ) { - --numOfOperations; - SDeferedOperation* dp = operations + numOfOperations; - dp->wrapper( dp ); +static void (*wrappers)(SCleanupAction*)[] = { + cleanupWrapper_void_ptr_ptr, + cleanupWrapper_void_ptr_bool, + cleanupWrapper_void_ptr, + cleanupWrapper_int_int, + cleanupWrapper_void_void, +}; + + +void cleanupPush_void_ptr_ptr( bool failOnly, void* func, void* arg1, void* arg2 ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; + ac->wrapper = 0; + ac->failOnly = failOnly; + ac->func = func; + ac->arg1.Ptr = arg1; + ac->arg2.Ptr = arg2; +} + +void cleanupPush_void_ptr_bool( bool failOnly, void* func, void* arg1, bool arg2 ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; + ac->wrapper = 1; + ac->failOnly = failOnly; + ac->func = func; + ac->arg1.Ptr = arg1; + ac->arg2.Bool = arg2; +} + +void cleanupPush_void_ptr( bool failOnly, void* func, void* arg ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; + ac->wrapper = 2; + ac->failOnly = failOnly; + ac->func = func; + ac->arg1.Ptr = arg1; +} + +void cleanupPush_int_int( bool failOnly, void* func, int arg ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; + ac->wrapper = 3; + ac->failOnly = failOnly; + ac->func = func; + ac->arg1.Int = arg; +} + +void cleanupPush_void( bool failOnly, void* func ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; + ac->wrapper = 4; + ac->failOnly = failOnly; + ac->func = func; +} + + + +int32 cleanupGetActionCount() { + return expList->numCleanupAction; +} + + +void cleanupExecute( int32_t anchor, bool failed ) { + while( expList->numCleanupAction > anchor ) { + --expList->numCleanupAction; + SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction; + if( failed || !(ac->failOnly) ) + ac->wrapper( ac ); } } From 058c8912e303bf389aa21f6a60256d18e0a21a43 Mon Sep 17 00:00:00 2001 From: localvar Date: Mon, 27 Apr 2020 08:17:54 +0800 Subject: [PATCH 10/15] fix compile errors --- src/util/inc/exception.h | 3 +- src/util/src/exception.c | 65 ++++++++++++++++++++-------------------- 2 files changed, 35 insertions(+), 33 deletions(-) diff --git a/src/util/inc/exception.h b/src/util/inc/exception.h index 229ba89d04..32e2fcb61b 100644 --- a/src/util/inc/exception.h +++ b/src/util/inc/exception.h @@ -18,6 +18,7 @@ #include #include +#include #include #ifdef __cplusplus @@ -58,7 +59,7 @@ void cleanupPush_int_int ( bool failOnly, void* func, int arg ); void cleanupPush_void ( bool failOnly, void* func ); int32_t cleanupGetActionCount(); -void cleanupExecute( bool failed, int32_t toIndex ); +void cleanupExecute( int32_t anchor, bool failed ); #define CLEANUP_PUSH_VOID_PTR_PTR( failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_ptr( (failOnly), (void*)(func), (void*)(arg1), (void*)(arg2) ) #define CLEANUP_PUSH_VOID_PTR_BOOL( 
failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_bool( (failOnly), (void*)(func), (void*)(arg1), (bool)(arg2) ) diff --git a/src/util/src/exception.c b/src/util/src/exception.c index b0e8fce371..27cf6fbcd6 100644 --- a/src/util/src/exception.c +++ b/src/util/src/exception.c @@ -22,7 +22,7 @@ void exceptionThrow( int code ) { static void cleanupWrapper_void_ptr_ptr( SCleanupAction* ca ) { - void (*func)( void*, void* ) = ac->func; + void (*func)( void*, void* ) = ca->func; func( ca->arg1.Ptr, ca->arg2.Ptr ); } @@ -46,7 +46,8 @@ static void cleanupWrapper_void_void( SCleanupAction* ca ) { func(); } -static void (*wrappers)(SCleanupAction*)[] = { +typedef void (*wrapper)(SCleanupAction*); +static wrapper wrappers[] = { cleanupWrapper_void_ptr_ptr, cleanupWrapper_void_ptr_bool, cleanupWrapper_void_ptr, @@ -58,57 +59,57 @@ static void (*wrappers)(SCleanupAction*)[] = { void cleanupPush_void_ptr_ptr( bool failOnly, void* func, void* arg1, void* arg2 ) { assert( expList->numCleanupAction < expList->maxCleanupAction ); - SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; - ac->wrapper = 0; - ac->failOnly = failOnly; - ac->func = func; - ac->arg1.Ptr = arg1; - ac->arg2.Ptr = arg2; + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 0; + ca->failOnly = failOnly; + ca->func = func; + ca->arg1.Ptr = arg1; + ca->arg2.Ptr = arg2; } void cleanupPush_void_ptr_bool( bool failOnly, void* func, void* arg1, bool arg2 ) { assert( expList->numCleanupAction < expList->maxCleanupAction ); - SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; - ac->wrapper = 1; - ac->failOnly = failOnly; - ac->func = func; - ac->arg1.Ptr = arg1; - ac->arg2.Bool = arg2; + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 1; + ca->failOnly = failOnly; + ca->func = func; + ca->arg1.Ptr = arg1; + ca->arg2.Bool = arg2; } void cleanupPush_void_ptr( bool failOnly, void* func, void* arg ) { assert( expList->numCleanupAction < expList->maxCleanupAction ); - SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; - ac->wrapper = 2; - ac->failOnly = failOnly; - ac->func = func; - ac->arg1.Ptr = arg1; + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 2; + ca->failOnly = failOnly; + ca->func = func; + ca->arg1.Ptr = arg; } void cleanupPush_int_int( bool failOnly, void* func, int arg ) { assert( expList->numCleanupAction < expList->maxCleanupAction ); - SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; - ac->wrapper = 3; - ac->failOnly = failOnly; - ac->func = func; - ac->arg1.Int = arg; + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 3; + ca->failOnly = failOnly; + ca->func = func; + ca->arg1.Int = arg; } void cleanupPush_void( bool failOnly, void* func ) { assert( expList->numCleanupAction < expList->maxCleanupAction ); - SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction++; - ac->wrapper = 4; - ac->failOnly = failOnly; - ac->func = func; + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 4; + ca->failOnly = failOnly; + ca->func = func; } -int32 cleanupGetActionCount() { +int32_t cleanupGetActionCount() { return expList->numCleanupAction; } @@ -116,8 +117,8 @@ int32 cleanupGetActionCount() { void cleanupExecute( int32_t anchor, bool failed ) { while( expList->numCleanupAction > anchor ) { 
--expList->numCleanupAction; - SCleanupAction *ac = expList->cleanupActions + expList->numCleanupAction; - if( failed || !(ac->failOnly) ) - ac->wrapper( ac ); + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction; + if( failed || !(ca->failOnly) ) + wrappers[ca->wrapper]( ca ); } } From 9fdf34552b8eb5ba111ec128c2777e44f30da24f Mon Sep 17 00:00:00 2001 From: localvar Date: Mon, 27 Apr 2020 09:16:56 +0800 Subject: [PATCH 11/15] split SBuffer to Reader & Writer --- src/util/inc/tbuffer.h | 230 +++++++------------ src/util/src/tbuffer.c | 499 +++++++++++++++++++---------------------- 2 files changed, 316 insertions(+), 413 deletions(-) diff --git a/src/util/inc/tbuffer.h b/src/util/inc/tbuffer.h index 2d8ea732cc..103b3710cf 100644 --- a/src/util/inc/tbuffer.h +++ b/src/util/inc/tbuffer.h @@ -23,160 +23,102 @@ extern "C" { #endif -/* -// SBuffer can be used to read or write a buffer, but cannot be used for both -// read & write at a same time. Below is an example: -#include -#include -#include "exception.h" -#include "tbuffer.h" - -int foo() { - SBuffer wbuf, rbuf; - tbufSetup(&wbuf, NULL, false); - tbufSetup(&rbuf, NULL, false); - - TRY { - //--------------------- write ------------------------ - tbufBeginWrite(&wbuf); - // reserve 1024 bytes for the buffer to improve performance - tbufEnsureCapacity(&wbuf, 1024); - // write 5 integers to the buffer - for (int i = 0; i < 5; i++) { - tbufWriteInt32(&wbuf, i); - } - // write a string to the buffer - tbufWriteString(&wbuf, "this is a string.\n"); - // acquire the result and close the write buffer - size_t size = tbufTell(&wbuf); - char* data = tbufGetData(&wbuf, true); - - //------------------------ read ----------------------- - tbufBeginRead(&rbuf, data, size); - // read & print out 5 integers - for (int i = 0; i < 5; i++) { - printf("%d\n", tbufReadInt32(&rbuf)); - } - // read & print out a string - puts(tbufReadString(&rbuf, NULL)); - // try read another integer, this result in an error as there no this integer - tbufReadInt32(&rbuf); - printf("you should not see this message.\n"); - } CATCH( code ) { - printf("exception code is: %d, you will see this message after print out 5 integers and a string.\n", code); - THROW( code ); - } END_CATCH - - tbufClose(&wbuf, true); - tbufClose(&rbuf, false); - return 0; -} - -int main(int argc, char** argv) { - TRY { - printf("in main: you will see this line\n"); - foo(); - printf("in main: you will not see this line\n"); - } CATCH( code ) { - printf("foo raise an exception with code %d\n", code); - } END_CATCH - - return 0; -} -*/ +typedef struct { + bool endian; + const char* data; + size_t pos; + size_t size; +} SBufferReader; typedef struct { - void* (*allocator)(void*, size_t); - bool endian; - char* data; - size_t pos; - size_t size; -} SBuffer; + bool endian; + char* data; + size_t pos; + size_t size; + void* (*allocator)( void*, size_t ); +} SBufferWriter; -// common functions can be used in both read & write +//////////////////////////////////////////////////////////////////////////////// +// common functions & macros for both reader & writer +#define tbufTell( buf ) ((buf)->pos) -// tbufSetup setup the buffer, should be called before tbufBeginRead / tbufBeginWrite -// *allocator*, function to allocate memory, will use 'realloc' if NULL -// *endian*, if true, read/write functions of primitive types will do 'ntoh' or 'hton' automatically -void tbufSetup(SBuffer* buf, void* (*allocator)(void*, size_t), bool endian); -size_t tbufTell(SBuffer* buf); -size_t tbufSeekTo(SBuffer* 
buf, size_t pos); -void tbufClose(SBuffer* buf, bool keepData); -// basic read functions -void tbufBeginRead(SBuffer* buf, void* data, size_t len); -size_t tbufSkip(SBuffer* buf, size_t size); -char* tbufRead(SBuffer* buf, size_t size); -void tbufReadToBuffer(SBuffer* buf, void* dst, size_t size); -const char* tbufReadString(SBuffer* buf, size_t* len); -size_t tbufReadToString(SBuffer* buf, char* dst, size_t size); -const char* tbufReadBinary(SBuffer* buf, size_t *len); -size_t tbufReadToBinary(SBuffer* buf, void* dst, size_t size); +//////////////////////////////////////////////////////////////////////////////// +// reader functions & macros -// basic write functions -void tbufBeginWrite(SBuffer* buf); -void tbufEnsureCapacity(SBuffer* buf, size_t size); -size_t tbufReserve(SBuffer* buf, size_t size); -char* tbufGetData(SBuffer* buf, bool takeOver); -void tbufWrite(SBuffer* buf, const void* data, size_t size); -void tbufWriteAt(SBuffer* buf, size_t pos, const void* data, size_t size); -void tbufWriteStringLen(SBuffer* buf, const char* str, size_t len); -void tbufWriteString(SBuffer* buf, const char* str); -// the prototype of WriteBinary and Write is identical -// the difference is: WriteBinary writes the length of the data to the buffer +// *Endian*, if true, reader functions of primitive types will do 'ntoh' automatically +#define tbufInitReader( Data, Size, Endian ) {.endian = (Endian), .data = (Data), .pos = 0, .size = ((Data) == NULL ? 0 :(Size))} + +size_t tbufSkip( SBufferReader* buf, size_t size ); + +char* tbufRead( SBufferReader* buf, size_t size ); +void tbufReadToBuffer( SBufferReader* buf, void* dst, size_t size ); +const char* tbufReadString( SBufferReader* buf, size_t* len ); +size_t tbufReadToString( SBufferReader* buf, char* dst, size_t size ); +const char* tbufReadBinary( SBufferReader* buf, size_t *len ); +size_t tbufReadToBinary( SBufferReader* buf, void* dst, size_t size ); + +bool tbufReadBool( SBufferReader* buf ); +char tbufReadChar( SBufferReader* buf ); +int8_t tbufReadInt8( SBufferReader* buf ); +uint8_t tbufReadUint8( SBufferReader* buf ); +int16_t tbufReadInt16( SBufferReader* buf ); +uint16_t tbufReadUint16( SBufferReader* buf ); +int32_t tbufReadInt32( SBufferReader* buf ); +uint32_t tbufReadUint32( SBufferReader* buf ); +int64_t tbufReadInt64( SBufferReader* buf ); +uint64_t tbufReadUint64( SBufferReader* buf ); +float tbufReadFloat( SBufferReader* buf ); +double tbufReadDouble( SBufferReader* buf ); + + +//////////////////////////////////////////////////////////////////////////////// +// writer functions & macros + +// *Allocator*, function to allocate memory, will use 'realloc' if NULL +// *Endian*, if true, writer functions of primitive types will do 'hton' automatically +#define tbufInitWriter( Allocator, Endian ) {.endian = (Endian), .data = NULL, .pos = 0, .size = 0, .allocator = ((Allocator) == NULL ? 
realloc : (Allocator))} +void tbufCloseWriter( SBufferWriter* buf ); + +void tbufEnsureCapacity( SBufferWriter* buf, size_t size ); +size_t tbufReserve( SBufferWriter* buf, size_t size ); +char* tbufGetData( SBufferWriter* buf, bool takeOver ); + +void tbufWrite( SBufferWriter* buf, const void* data, size_t size ); +void tbufWriteAt( SBufferWriter* buf, size_t pos, const void* data, size_t size ); +void tbufWriteStringLen( SBufferWriter* buf, const char* str, size_t len ); +void tbufWriteString( SBufferWriter* buf, const char* str ); +// the prototype of tbufWriteBinary and tbufWrite are identical +// the difference is: tbufWriteBinary writes the length of the data to the buffer // first, then the actual data, which means the reader don't need to know data // size before read. Write only write the data itself, which means the reader // need to know data size before read. -void tbufWriteBinary(SBuffer* buf, const void* data, size_t len); +void tbufWriteBinary( SBufferWriter* buf, const void* data, size_t len ); -// read / write functions for primitive types -bool tbufReadBool(SBuffer* buf); -void tbufWriteBool(SBuffer* buf, bool data); -void tbufWriteBoolAt(SBuffer* buf, size_t pos, bool data); - -char tbufReadChar(SBuffer* buf); -void tbufWriteChar(SBuffer* buf, char data); -void tbufWriteCharAt(SBuffer* buf, size_t pos, char data); - -int8_t tbufReadInt8(SBuffer* buf); -void tbufWriteInt8(SBuffer* buf, int8_t data); -void tbufWriteInt8At(SBuffer* buf, size_t pos, int8_t data); - -uint8_t tbufReadUint8(SBuffer* buf); -void tbufWriteUint8(SBuffer* buf, uint8_t data); -void tbufWriteUint8At(SBuffer* buf, size_t pos, uint8_t data); - -int16_t tbufReadInt16(SBuffer* buf); -void tbufWriteInt16(SBuffer* buf, int16_t data); -void tbufWriteInt16At(SBuffer* buf, size_t pos, int16_t data); - -uint16_t tbufReadUint16(SBuffer* buf); -void tbufWriteUint16(SBuffer* buf, uint16_t data); -void tbufWriteUint16At(SBuffer* buf, size_t pos, uint16_t data); - -int32_t tbufReadInt32(SBuffer* buf); -void tbufWriteInt32(SBuffer* buf, int32_t data); -void tbufWriteInt32At(SBuffer* buf, size_t pos, int32_t data); - -uint32_t tbufReadUint32(SBuffer* buf); -void tbufWriteUint32(SBuffer* buf, uint32_t data); -void tbufWriteUint32At(SBuffer* buf, size_t pos, uint32_t data); - -int64_t tbufReadInt64(SBuffer* buf); -void tbufWriteInt64(SBuffer* buf, int64_t data); -void tbufWriteInt64At(SBuffer* buf, size_t pos, int64_t data); - -uint64_t tbufReadUint64(SBuffer* buf); -void tbufWriteUint64(SBuffer* buf, uint64_t data); -void tbufWriteUint64At(SBuffer* buf, size_t pos, uint64_t data); - -float tbufReadFloat(SBuffer* buf); -void tbufWriteFloat(SBuffer* buf, float data); -void tbufWriteFloatAt(SBuffer* buf, size_t pos, float data); - -double tbufReadDouble(SBuffer* buf); -void tbufWriteDouble(SBuffer* buf, double data); -void tbufWriteDoubleAt(SBuffer* buf, size_t pos, double data); +void tbufWriteBool( SBufferWriter* buf, bool data ); +void tbufWriteBoolAt( SBufferWriter* buf, size_t pos, bool data ); +void tbufWriteChar( SBufferWriter* buf, char data ); +void tbufWriteCharAt( SBufferWriter* buf, size_t pos, char data ); +void tbufWriteInt8( SBufferWriter* buf, int8_t data ); +void tbufWriteInt8At( SBufferWriter* buf, size_t pos, int8_t data ); +void tbufWriteUint8( SBufferWriter* buf, uint8_t data ); +void tbufWriteUint8At( SBufferWriter* buf, size_t pos, uint8_t data ); +void tbufWriteInt16( SBufferWriter* buf, int16_t data ); +void tbufWriteInt16At( SBufferWriter* buf, size_t pos, int16_t data ); +void tbufWriteUint16( 
SBufferWriter* buf, uint16_t data ); +void tbufWriteUint16At( SBufferWriter* buf, size_t pos, uint16_t data ); +void tbufWriteInt32( SBufferWriter* buf, int32_t data ); +void tbufWriteInt32At( SBufferWriter* buf, size_t pos, int32_t data ); +void tbufWriteUint32( SBufferWriter* buf, uint32_t data ); +void tbufWriteUint32At( SBufferWriter* buf, size_t pos, uint32_t data ); +void tbufWriteInt64( SBufferWriter* buf, int64_t data ); +void tbufWriteInt64At( SBufferWriter* buf, size_t pos, int64_t data ); +void tbufWriteUint64( SBufferWriter* buf, uint64_t data ); +void tbufWriteUint64At( SBufferWriter* buf, size_t pos, uint64_t data ); +void tbufWriteFloat( SBufferWriter* buf, float data ); +void tbufWriteFloatAt( SBufferWriter* buf, size_t pos, float data ); +void tbufWriteDouble( SBufferWriter* buf, double data ); +void tbufWriteDoubleAt( SBufferWriter* buf, size_t pos, double data ); #ifdef __cplusplus } diff --git a/src/util/src/tbuffer.c b/src/util/src/tbuffer.c index c254436a4e..b2ded0203e 100644 --- a/src/util/src/tbuffer.c +++ b/src/util/src/tbuffer.c @@ -22,137 +22,188 @@ #include //////////////////////////////////////////////////////////////////////////////// -// common functions +// reader functions -void tbufSetup( - SBuffer* buf, - void* (*allocator)(void*, size_t), - bool endian -) { - if (allocator != NULL) { - buf->allocator = allocator; - } else { - buf->allocator = realloc; - } - - buf->endian = endian; -} - -size_t tbufTell(SBuffer* buf) { - return buf->pos; -} - -size_t tbufSeekTo(SBuffer* buf, size_t pos) { - if (pos > buf->size) { +size_t tbufSkip(SBufferReader* buf, size_t size) { + if( (buf->pos + size) > buf->size ) { THROW( TSDB_CODE_MEMORY_CORRUPTED ); } size_t old = buf->pos; - buf->pos = pos; + buf->pos += size; return old; } -void tbufClose(SBuffer* buf, bool keepData) { - if (!keepData) { - (*buf->allocator)(buf->data, 0); - } - buf->data = NULL; - buf->pos = 0; - buf->size = 0; -} - -//////////////////////////////////////////////////////////////////////////////// -// read functions - -void tbufBeginRead(SBuffer* buf, void* data, size_t len) { - buf->data = data; - buf->pos = 0; - buf->size = (data == NULL) ? 0 : len; -} - -size_t tbufSkip(SBuffer* buf, size_t size) { - return tbufSeekTo(buf, buf->pos + size); -} - -char* tbufRead(SBuffer* buf, size_t size) { +char* tbufRead( SBufferReader* buf, size_t size ) { char* ret = buf->data + buf->pos; - tbufSkip(buf, size); + tbufSkip( buf, size ); return ret; } -void tbufReadToBuffer(SBuffer* buf, void* dst, size_t size) { - assert(dst != NULL); +void tbufReadToBuffer( SBufferReader* buf, void* dst, size_t size ) { + assert( dst != NULL ); // always using memcpy, leave optimization to compiler - memcpy(dst, tbufRead(buf, size), size); + memcpy( dst, tbufRead(buf, size), size ); } -static size_t tbufReadLength(SBuffer* buf) { +static size_t tbufReadLength( SBufferReader* buf ) { // maximum length is 65535, if larger length is required // this function and the corresponding write function need to be // revised. 
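  // Illustration (a minimal sketch, assuming the writer used endian == false
  // on a little-endian host): tbufWriteStringLen( &bw, "abc", 3 ) produces the
  // bytes 03 00 'a' 'b' 'c' 00, that is, a uint16_t length prefix, the payload,
  // and the terminating '\0' appended by the writer. The read below therefore
  // yields 3, and the string data follows at the new read position.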
- uint16_t l = tbufReadUint16(buf); + uint16_t l = tbufReadUint16( buf ); return l; } -const char* tbufReadString(SBuffer* buf, size_t* len) { - size_t l = tbufReadLength(buf); - char* ret = buf->data + buf->pos; - tbufSkip(buf, l + 1); - ret[l] = 0; // ensure the string end with '\0' - if (len != NULL) { +const char* tbufReadString( SBufferReader* buf, size_t* len ) { + size_t l = tbufReadLength( buf ); + char* ret = buf->data + buf->pos; + tbufSkip( buf, l + 1 ); + if( ret[l] != 0 ) { + THROW( TSDB_CODE_MEMORY_CORRUPTED ); + } + if( len != NULL ) { *len = l; } return ret; } -size_t tbufReadToString(SBuffer* buf, char* dst, size_t size) { - assert(dst != NULL); - size_t len; - const char* str = tbufReadString(buf, &len); +size_t tbufReadToString( SBufferReader* buf, char* dst, size_t size ) { + assert( dst != NULL ); + size_t len; + const char* str = tbufReadString( buf, &len ); if (len >= size) { len = size - 1; } - memcpy(dst, str, len); + memcpy( dst, str, len ); dst[len] = 0; return len; } -const char* tbufReadBinary(SBuffer* buf, size_t *len) { - size_t l = tbufReadLength(buf); +const char* tbufReadBinary( SBufferReader* buf, size_t *len ) { + size_t l = tbufReadLength( buf ); char* ret = buf->data + buf->pos; - tbufSkip(buf, l); - if (len != NULL) { + tbufSkip( buf, l ); + if( len != NULL ) { *len = l; } return ret; } -size_t tbufReadToBinary(SBuffer* buf, void* dst, size_t size) { - assert(dst != NULL); - size_t len; - const char* data = tbufReadBinary(buf, &len); - if (len >= size) { +size_t tbufReadToBinary( SBufferReader* buf, void* dst, size_t size ) { + assert( dst != NULL ); + size_t len; + const char* data = tbufReadBinary( buf, &len ); + if( len >= size ) { len = size; } - memcpy(dst, data, len); + memcpy( dst, data, len ); return len; } -//////////////////////////////////////////////////////////////////////////////// -// write functions +bool tbufReadBool( SBufferReader* buf ) { + bool ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + return ret; +} -void tbufBeginWrite(SBuffer* buf) { +char tbufReadChar( SBufferReader* buf ) { + char ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + return ret; +} + +int8_t tbufReadInt8( SBufferReader* buf ) { + int8_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + return ret; +} + +uint8_t tbufReadUint8( SBufferReader* buf ) { + uint8_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + return ret; +} + +int16_t tbufReadInt16( SBufferReader* buf ) { + int16_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return (int16_t)ntohs( ret ); + } + return ret; +} + +uint16_t tbufReadUint16( SBufferReader* buf ) { + uint16_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return ntohs( ret ); + } + return ret; +} + +int32_t tbufReadInt32( SBufferReader* buf ) { + int32_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return (int32_t)ntohl( ret ); + } + return ret; +} + +uint32_t tbufReadUint32( SBufferReader* buf ) { + uint32_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return ntohl( ret ); + } + return ret; +} + +int64_t tbufReadInt64( SBufferReader* buf ) { + int64_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return (int64_t)htobe64( ret ); // TODO: ntohll + } + return ret; +} + +uint64_t tbufReadUint64( SBufferReader* buf ) { + uint64_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return htobe64( ret ); // TODO: ntohll + } + return ret; +} + +float 
tbufReadFloat( SBufferReader* buf ) { + uint32_t ret = tbufReadUint32( buf ); + return *(float*)( &ret ); +} + +double tbufReadDouble(SBufferReader* buf) { + uint64_t ret = tbufReadUint64( buf ); + return *(double*)( &ret ); +} + +//////////////////////////////////////////////////////////////////////////////// +// writer functions + +void tbufCloseWriter( SBufferWriter* buf ) { + (*buf->allocator)( buf->data, 0 ); buf->data = NULL; buf->pos = 0; buf->size = 0; } -void tbufEnsureCapacity(SBuffer* buf, size_t size) { +void tbufEnsureCapacity( SBufferWriter* buf, size_t size ) { size += buf->pos; - if (size > buf->size) { + if( size > buf->size ) { size_t nsize = size + buf->size; - char* data = (*buf->allocator)(buf->data, nsize); - if (data == NULL) { - // TODO: handle client out of memory + char* data = (*buf->allocator)( buf->data, nsize ); + // TODO: the exception should be thrown by the allocator function + if( data == NULL ) { THROW( TSDB_CODE_SERV_OUT_OF_MEMORY ); } buf->data = data; @@ -160,279 +211,189 @@ void tbufEnsureCapacity(SBuffer* buf, size_t size) { } } -size_t tbufReserve(SBuffer* buf, size_t size) { - tbufEnsureCapacity(buf, size); - return tbufSeekTo(buf, buf->pos + size); +size_t tbufReserve( SBufferWriter* buf, size_t size ) { + tbufEnsureCapacity( buf, size ); + size_t old = buf->pos; + buf->pos += size; + return old; } -char* tbufGetData(SBuffer* buf, bool takeOver) { +char* tbufGetData( SBufferWriter* buf, bool takeOver ) { char* ret = buf->data; - if (takeOver) { + if( takeOver ) { buf->pos = 0; buf->size = 0; buf->data = NULL; } - return ret; } -void tbufWrite(SBuffer* buf, const void* data, size_t size) { - assert(data != NULL); - tbufEnsureCapacity(buf, size); - memcpy(buf->data + buf->pos, data, size); +void tbufWrite( SBufferWriter* buf, const void* data, size_t size ) { + assert( data != NULL ); + tbufEnsureCapacity( buf, size ); + memcpy( buf->data + buf->pos, data, size ); buf->pos += size; } -void tbufWriteAt(SBuffer* buf, size_t pos, const void* data, size_t size) { - assert(data != NULL); +void tbufWriteAt( SBufferWriter* buf, size_t pos, const void* data, size_t size ) { + assert( data != NULL ); // this function can only be called to fill the gap on previous writes, // so 'pos + size <= buf->pos' must be true - assert(pos + size <= buf->pos); - memcpy(buf->data + pos, data, size); + assert( pos + size <= buf->pos ); + memcpy( buf->data + pos, data, size ); } -static void tbufWriteLength(SBuffer* buf, size_t len) { +static void tbufWriteLength( SBufferWriter* buf, size_t len ) { // maximum length is 65535, if larger length is required // this function and the corresponding read function need to be // revised. 
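  // Usage sketch (hypothetical names, matching the header comment on
  // tbufWriteBinary vs tbufWrite): tbufWriteBinary( &bw, blob, n ) emits this
  // length prefix first, so a reader can call tbufReadBinary( &br, &len )
  // without knowing n in advance; tbufWrite( &bw, blob, n ) emits only the
  // payload, so the reader must already know n and pass it to tbufRead( &br, n ).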
- assert(len <= 0xffff); - tbufWriteUint16(buf, (uint16_t)len); + assert( len <= 0xffff ); + tbufWriteUint16( buf, (uint16_t)len ); } -void tbufWriteStringLen(SBuffer* buf, const char* str, size_t len) { - tbufWriteLength(buf, len); - tbufWrite(buf, str, len); - tbufWriteChar(buf, '\0'); +void tbufWriteStringLen( SBufferWriter* buf, const char* str, size_t len ) { + tbufWriteLength( buf, len ); + tbufWrite( buf, str, len ); + tbufWriteChar( buf, '\0' ); } -void tbufWriteString(SBuffer* buf, const char* str) { - tbufWriteStringLen(buf, str, strlen(str)); +void tbufWriteString( SBufferWriter* buf, const char* str ) { + tbufWriteStringLen( buf, str, strlen(str) ); } -void tbufWriteBinary(SBuffer* buf, const void* data, size_t len) { - tbufWriteLength(buf, len); - tbufWrite(buf, data, len); +void tbufWriteBinary( SBufferWriter* buf, const void* data, size_t len ) { + tbufWriteLength( buf, len ); + tbufWrite( buf, data, len ); } -//////////////////////////////////////////////////////////////////////////////// -// read / write functions for primitive types - -bool tbufReadBool(SBuffer* buf) { - bool ret; - tbufReadToBuffer(buf, &ret, sizeof(ret)); - return ret; +void tbufWriteBool( SBufferWriter* buf, bool data ) { + tbufWrite( buf, &data, sizeof(data) ); } -void tbufWriteBool(SBuffer* buf, bool data) { - tbufWrite(buf, &data, sizeof(data)); +void tbufWriteBoolAt( SBufferWriter* buf, size_t pos, bool data ) { + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -void tbufWriteBoolAt(SBuffer* buf, size_t pos, bool data) { - tbufWriteAt(buf, pos, &data, sizeof(data)); +void tbufWriteChar( SBufferWriter* buf, char data ) { + tbufWrite( buf, &data, sizeof(data) ); } -char tbufReadChar(SBuffer* buf) { - char ret; - tbufReadToBuffer(buf, &ret, sizeof(ret)); - return ret; +void tbufWriteCharAt( SBufferWriter* buf, size_t pos, char data ) { + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -void tbufWriteChar(SBuffer* buf, char data) { - tbufWrite(buf, &data, sizeof(data)); +void tbufWriteInt8( SBufferWriter* buf, int8_t data ) { + tbufWrite( buf, &data, sizeof(data) ); } -void tbufWriteCharAt(SBuffer* buf, size_t pos, char data) { - tbufWriteAt(buf, pos, &data, sizeof(data)); +void tbufWriteInt8At( SBufferWriter* buf, size_t pos, int8_t data ) { + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -int8_t tbufReadInt8(SBuffer* buf) { - int8_t ret; - tbufReadToBuffer(buf, &ret, sizeof(ret)); - return ret; +void tbufWriteUint8( SBufferWriter* buf, uint8_t data ) { + tbufWrite( buf, &data, sizeof(data) ); } -void tbufWriteInt8(SBuffer* buf, int8_t data) { - tbufWrite(buf, &data, sizeof(data)); +void tbufWriteUint8At( SBufferWriter* buf, size_t pos, uint8_t data ) { + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -void tbufWriteInt8At(SBuffer* buf, size_t pos, int8_t data) { - tbufWriteAt(buf, pos, &data, sizeof(data)); -} - -uint8_t tbufReadUint8(SBuffer* buf) { - uint8_t ret; - tbufReadToBuffer(buf, &ret, sizeof(ret)); - return ret; -} - -void tbufWriteUint8(SBuffer* buf, uint8_t data) { - tbufWrite(buf, &data, sizeof(data)); -} - -void tbufWriteUint8At(SBuffer* buf, size_t pos, uint8_t data) { - tbufWriteAt(buf, pos, &data, sizeof(data)); -} - -int16_t tbufReadInt16(SBuffer* buf) { - int16_t ret; - tbufReadToBuffer(buf, &ret, sizeof(ret)); - if (buf->endian) { - return (int16_t)ntohs(ret); +void tbufWriteInt16( SBufferWriter* buf, int16_t data ) { + if( buf->endian ) { + data = (int16_t)htons( data ); } - return ret; + tbufWrite( buf, &data, sizeof(data) ); } -void tbufWriteInt16(SBuffer* buf, int16_t data) 
{ - if (buf->endian) { - data = (int16_t)htons(data); +void tbufWriteInt16At( SBufferWriter* buf, size_t pos, int16_t data ) { + if( buf->endian ) { + data = (int16_t)htons( data ); } - tbufWrite(buf, &data, sizeof(data)); + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -void tbufWriteInt16At(SBuffer* buf, size_t pos, int16_t data) { - if (buf->endian) { - data = (int16_t)htons(data); +void tbufWriteUint16( SBufferWriter* buf, uint16_t data ) { + if( buf->endian ) { + data = htons( data ); } - tbufWriteAt(buf, pos, &data, sizeof(data)); + tbufWrite( buf, &data, sizeof(data) ); } -uint16_t tbufReadUint16(SBuffer* buf) { - uint16_t ret; - tbufReadToBuffer(buf, &ret, sizeof(ret)); - if (buf->endian) { - return ntohs(ret); +void tbufWriteUint16At( SBufferWriter* buf, size_t pos, uint16_t data ) { + if( buf->endian ) { + data = htons( data ); } - return ret; + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -void tbufWriteUint16(SBuffer* buf, uint16_t data) { - if (buf->endian) { - data = htons(data); +void tbufWriteInt32( SBufferWriter* buf, int32_t data ) { + if( buf->endian ) { + data = (int32_t)htonl( data ); } - tbufWrite(buf, &data, sizeof(data)); + tbufWrite( buf, &data, sizeof(data) ); } -void tbufWriteUint16At(SBuffer* buf, size_t pos, uint16_t data) { - if (buf->endian) { - data = htons(data); +void tbufWriteInt32At( SBufferWriter* buf, size_t pos, int32_t data ) { + if( buf->endian ) { + data = (int32_t)htonl( data ); } - tbufWriteAt(buf, pos, &data, sizeof(data)); + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -int32_t tbufReadInt32(SBuffer* buf) { - int32_t ret; - tbufReadToBuffer(buf, &ret, sizeof(ret)); - if (buf->endian) { - return (int32_t)ntohl(ret); +void tbufWriteUint32( SBufferWriter* buf, uint32_t data ) { + if( buf->endian ) { + data = htonl( data ); } - return ret; + tbufWrite( buf, &data, sizeof(data) ); } -void tbufWriteInt32(SBuffer* buf, int32_t data) { - if (buf->endian) { - data = (int32_t)htonl(data); +void tbufWriteUint32At( SBufferWriter* buf, size_t pos, uint32_t data ) { + if( buf->endian ) { + data = htonl( data ); } - tbufWrite(buf, &data, sizeof(data)); + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -void tbufWriteInt32At(SBuffer* buf, size_t pos, int32_t data) { - if (buf->endian) { - data = (int32_t)htonl(data); +void tbufWriteInt64( SBufferWriter* buf, int64_t data ) { + if( buf->endian ) { + data = (int64_t)htobe64( data ); } - tbufWriteAt(buf, pos, &data, sizeof(data)); + tbufWrite( buf, &data, sizeof(data) ); } -uint32_t tbufReadUint32(SBuffer* buf) { - uint32_t ret; - tbufReadToBuffer(buf, &ret, sizeof(ret)); - if (buf->endian) { - return ntohl(ret); +void tbufWriteInt64At( SBufferWriter* buf, size_t pos, int64_t data ) { + if( buf->endian ) { + data = (int64_t)htobe64( data ); } - return ret; + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -void tbufWriteUint32(SBuffer* buf, uint32_t data) { - if (buf->endian) { - data = htonl(data); +void tbufWriteUint64( SBufferWriter* buf, uint64_t data ) { + if( buf->endian ) { + data = htobe64( data ); } - tbufWrite(buf, &data, sizeof(data)); + tbufWrite( buf, &data, sizeof(data) ); } -void tbufWriteUint32At(SBuffer* buf, size_t pos, uint32_t data) { - if (buf->endian) { - data = htonl(data); +void tbufWriteUint64At( SBufferWriter* buf, size_t pos, uint64_t data ) { + if( buf->endian ) { + data = htobe64( data ); } - tbufWriteAt(buf, pos, &data, sizeof(data)); + tbufWriteAt( buf, pos, &data, sizeof(data) ); } -int64_t tbufReadInt64(SBuffer* buf) { - int64_t ret; - tbufReadToBuffer(buf, &ret, 
sizeof(ret)); - if (buf->endian) { - return (int64_t)htobe64(ret); // TODO: ntohll - } - return ret; +void tbufWriteFloat( SBufferWriter* buf, float data ) { + tbufWriteUint32( buf, *(uint32_t*)(&data) ); } -void tbufWriteInt64(SBuffer* buf, int64_t data) { - if (buf->endian) { - data = (int64_t)htobe64(data); - } - tbufWrite(buf, &data, sizeof(data)); +void tbufWriteFloatAt( SBufferWriter* buf, size_t pos, float data ) { + tbufWriteUint32At( buf, pos, *(uint32_t*)(&data) ); } -void tbufWriteInt64At(SBuffer* buf, size_t pos, int64_t data) { - if (buf->endian) { - data = (int64_t)htobe64(data); - } - tbufWriteAt(buf, pos, &data, sizeof(data)); +void tbufWriteDouble( SBufferWriter* buf, double data ) { + tbufWriteUint64( buf, *(uint64_t*)(&data) ); } -uint64_t tbufReadUint64(SBuffer* buf) { - uint64_t ret; - tbufReadToBuffer(buf, &ret, sizeof(ret)); - if (buf->endian) { - return htobe64(ret); // TODO: ntohll - } - return ret; -} - -void tbufWriteUint64(SBuffer* buf, uint64_t data) { - if (buf->endian) { - data = htobe64(data); - } - tbufWrite(buf, &data, sizeof(data)); -} - -void tbufWriteUint64At(SBuffer* buf, size_t pos, uint64_t data) { - if (buf->endian) { - data = htobe64(data); - } - tbufWriteAt(buf, pos, &data, sizeof(data)); -} - -float tbufReadFloat(SBuffer* buf) { - uint32_t ret = tbufReadUint32(buf); - return *(float*)(&ret); -} - -void tbufWriteFloat(SBuffer* buf, float data) { - tbufWriteUint32(buf, *(uint32_t*)(&data)); -} - -void tbufWriteFloatAt(SBuffer* buf, size_t pos, float data) { - tbufWriteUint32At(buf, pos, *(uint32_t*)(&data)); -} - -double tbufReadDouble(SBuffer* buf) { - uint64_t ret = tbufReadUint64(buf); - return *(double*)(&ret); -} - -void tbufWriteDouble(SBuffer* buf, double data) { - tbufWriteUint64(buf, *(uint64_t*)(&data)); -} - -void tbufWriteDoubleAt(SBuffer* buf, size_t pos, double data) { - tbufWriteUint64At(buf, pos, *(uint64_t*)(&data)); +void tbufWriteDoubleAt( SBufferWriter* buf, size_t pos, double data ) { + tbufWriteUint64At( buf, pos, *(uint64_t*)(&data) ); } From 241f7a2239482f74cf7f00ba668b16192976eb25 Mon Sep 17 00:00:00 2001 From: localvar Date: Mon, 27 Apr 2020 10:23:07 +0800 Subject: [PATCH 12/15] TD-153: fix bugs --- src/util/inc/exception.h | 57 +++++++++++++++++++++++----------------- src/util/inc/tbuffer.h | 54 ++++++++++++++++++++++++++++++++++++- src/util/src/exception.c | 16 ++++++++--- src/util/src/tbuffer.c | 8 +++--- 4 files changed, 102 insertions(+), 33 deletions(-) diff --git a/src/util/inc/exception.h b/src/util/inc/exception.h index 32e2fcb61b..41f01d68dd 100644 --- a/src/util/inc/exception.h +++ b/src/util/inc/exception.h @@ -52,25 +52,6 @@ typedef struct SCleanupAction { } arg1, arg2; } SCleanupAction; -void cleanupPush_void_ptr_ptr ( bool failOnly, void* func, void* arg1, void* arg2 ); -void cleanupPush_void_ptr_bool ( bool failOnly, void* func, void* arg1, bool arg2 ); -void cleanupPush_void_ptr ( bool failOnly, void* func, void* arg ); -void cleanupPush_int_int ( bool failOnly, void* func, int arg ); -void cleanupPush_void ( bool failOnly, void* func ); - -int32_t cleanupGetActionCount(); -void cleanupExecute( int32_t anchor, bool failed ); - -#define CLEANUP_PUSH_VOID_PTR_PTR( failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_ptr( (failOnly), (void*)(func), (void*)(arg1), (void*)(arg2) ) -#define CLEANUP_PUSH_VOID_PTR_BOOL( failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_bool( (failOnly), (void*)(func), (void*)(arg1), (bool)(arg2) ) -#define CLEANUP_PUSH_VOID_PTR( failOnly, func, arg ) cleanupPush_void_ptr( (failOnly), 
(void*)(func), (void*)(arg) ) -#define CLEANUP_PUSH_INT_INT( failOnly, func, arg ) cleanupPush_void_ptr( (failOnly), (void*)(func), (int)(arg) ) -#define CLEANUP_PUSH_VOID( failOnly, func ) cleanupPush_void( (failOnly), (void*)(func) ) -#define CLEANUP_PUSH_FREE( failOnly, arg ) cleanupPush_void_ptr( (failOnly), free, (void*)(arg) ) -#define CLEANUP_PUSH_CLOSE( failOnly, arg ) cleanupPush_int_int( (failOnly), close, (int)(arg) ) - -#define CLEANUP_CREATE_ANCHOR() int32_t cleanupAnchor = cleanupGetActionCount() -#define CLEANUP_EXECUTE( failed ) cleanupExecute( cleanupAnchor, (failed) ) /* * exception hander registration @@ -84,29 +65,57 @@ typedef struct SExceptionNode { SCleanupAction* cleanupActions; } SExceptionNode; +//////////////////////////////////////////////////////////////////////////////// +// functions & macros for auto-cleanup + +void cleanupPush_void_ptr_ptr ( bool failOnly, void* func, void* arg1, void* arg2 ); +void cleanupPush_void_ptr_bool ( bool failOnly, void* func, void* arg1, bool arg2 ); +void cleanupPush_void_ptr ( bool failOnly, void* func, void* arg ); +void cleanupPush_int_int ( bool failOnly, void* func, int arg ); +void cleanupPush_void ( bool failOnly, void* func ); + +int32_t cleanupGetActionCount(); +void cleanupExecuteTo( int32_t anchor, bool failed ); +void cleanupExecute( SExceptionNode* node, bool failed ); + +#define CLEANUP_PUSH_VOID_PTR_PTR( failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_ptr( (failOnly), (void*)(func), (void*)(arg1), (void*)(arg2) ) +#define CLEANUP_PUSH_VOID_PTR_BOOL( failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_bool( (failOnly), (void*)(func), (void*)(arg1), (bool)(arg2) ) +#define CLEANUP_PUSH_VOID_PTR( failOnly, func, arg ) cleanupPush_void_ptr( (failOnly), (void*)(func), (void*)(arg) ) +#define CLEANUP_PUSH_INT_INT( failOnly, func, arg ) cleanupPush_void_ptr( (failOnly), (void*)(func), (int)(arg) ) +#define CLEANUP_PUSH_VOID( failOnly, func ) cleanupPush_void( (failOnly), (void*)(func) ) +#define CLEANUP_PUSH_FREE( failOnly, arg ) cleanupPush_void_ptr( (failOnly), free, (void*)(arg) ) +#define CLEANUP_PUSH_CLOSE( failOnly, arg ) cleanupPush_int_int( (failOnly), close, (int)(arg) ) + +#define CLEANUP_GET_ANCHOR() cleanupGetActionCount() +#define CLEANUP_EXECUTE_TO( anchor, failed ) cleanupExecuteTo( (anchor), (failed) ) + + +//////////////////////////////////////////////////////////////////////////////// +// functions & macros for exception handling + void exceptionPushNode( SExceptionNode* node ); int32_t exceptionPopNode(); void exceptionThrow( int code ); #define TRY(maxCleanupActions) do { \ SExceptionNode exceptionNode = { 0 }; \ - SDeferedOperation cleanupActions[maxCleanupActions > 0 ? maxCleanupActions : 1]; \ - exceptionNode.maxCleanupAction = maxCleanupActions > 0 ? maxDefered : 1; \ + SCleanupAction cleanupActions[(maxCleanupActions) > 0 ? (maxCleanupActions) : 1]; \ + exceptionNode.maxCleanupAction = (maxCleanupActions) > 0 ? 
(maxCleanupActions) : 1; \ exceptionNode.cleanupActions = cleanupActions; \ - int32_t cleanupAnchor = 0; \ exceptionPushNode( &exceptionNode ); \ int caughtException = setjmp( exceptionNode.jb ); \ if( caughtException == 0 ) #define CATCH( code ) int code = exceptionPopNode(); \ - if( caughtEexception == 1 ) + if( caughtException == 1 ) #define FINALLY( code ) int code = exceptionPopNode(); #define END_TRY } while( 0 ); #define THROW( x ) exceptionThrow( (x) ) -#define CAUGHT_EXCEPTION() ((bool)(caughtEexception == 1)) +#define CAUGHT_EXCEPTION() ((bool)(caughtException == 1)) +#define CLEANUP_EXECUTE() cleanupExecute( &exceptionNode, CAUGHT_EXCEPTION() ) #ifdef __cplusplus } diff --git a/src/util/inc/tbuffer.h b/src/util/inc/tbuffer.h index 103b3710cf..8f3f7f777e 100644 --- a/src/util/inc/tbuffer.h +++ b/src/util/inc/tbuffer.h @@ -23,6 +23,58 @@ extern "C" { #endif +//////////////////////////////////////////////////////////////////////////////// +// usage example +/* +#include +#include "exception.h" + +int main( int argc, char** argv ) { + SBufferWriter bw = tbufInitWriter( NULL, false ); + + TRY( 1 ) { + //--------------------- write ------------------------ + // reserve 1024 bytes for the buffer to improve performance + tbufEnsureCapacity( &bw, 1024 ); + + // reserve space for the interger count + size_t pos = tbufReserve( &bw, sizeof(int32_t) ); + // write 5 integers to the buffer + for( int i = 0; i < 5; i++) { + tbufWriteInt32( &bw, i ); + } + // write the integer count to buffer at reserved position + tbufWriteInt32At( &bw, pos, 5 ); + + // write a string to the buffer + tbufWriteString( &bw, "this is a string.\n" ); + // acquire the result and close the write buffer + size_t size = tbufTell( &bw ); + char* data = tbufGetData( &bw, false ); + + //------------------------ read ----------------------- + SBufferReader br = tbufInitReader( data, size, false ); + // read & print out all integers + int32_t count = tbufReadInt32( &br ); + for( int i = 0; i < count; i++ ) { + printf( "%d\n", tbufReadInt32(&br) ); + } + // read & print out a string + puts( tbufReadString(&br, NULL) ); + // try read another integer, this result in an error as there no this integer + tbufReadInt32( &br ); + printf( "you should not see this message.\n" ); + } CATCH( code ) { + printf( "exception code is: %d, you will see this message after print out 5 integers and a string.\n", code ); + // throw it again and the exception will be caught in main + THROW( code ); + } END_TRY + + tbufCloseWriter( &bw ); + return 0; +} +*/ + typedef struct { bool endian; const char* data; @@ -51,7 +103,7 @@ typedef struct { size_t tbufSkip( SBufferReader* buf, size_t size ); -char* tbufRead( SBufferReader* buf, size_t size ); +const char* tbufRead( SBufferReader* buf, size_t size ); void tbufReadToBuffer( SBufferReader* buf, void* dst, size_t size ); const char* tbufReadString( SBufferReader* buf, size_t* len ); size_t tbufReadToString( SBufferReader* buf, char* dst, size_t size ); diff --git a/src/util/src/exception.c b/src/util/src/exception.c index 27cf6fbcd6..7f8f91c784 100644 --- a/src/util/src/exception.c +++ b/src/util/src/exception.c @@ -114,11 +114,19 @@ int32_t cleanupGetActionCount() { } -void cleanupExecute( int32_t anchor, bool failed ) { - while( expList->numCleanupAction > anchor ) { - --expList->numCleanupAction; - SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction; +static void doExecuteCleanup( SExceptionNode* node, int32_t anchor, bool failed ) { + while( node->numCleanupAction > anchor ) 
{ + --node->numCleanupAction; + SCleanupAction *ca = node->cleanupActions + node->numCleanupAction; if( failed || !(ca->failOnly) ) wrappers[ca->wrapper]( ca ); } } + +void cleanupExecuteTo( int32_t anchor, bool failed ) { + doExecuteCleanup( expList, anchor, failed ); +} + +void cleanupExecute( SExceptionNode* node, bool failed ) { + doExecuteCleanup( node, 0, failed ); +} \ No newline at end of file diff --git a/src/util/src/tbuffer.c b/src/util/src/tbuffer.c index b2ded0203e..3b4cc74cc3 100644 --- a/src/util/src/tbuffer.c +++ b/src/util/src/tbuffer.c @@ -33,8 +33,8 @@ size_t tbufSkip(SBufferReader* buf, size_t size) { return old; } -char* tbufRead( SBufferReader* buf, size_t size ) { - char* ret = buf->data + buf->pos; +const char* tbufRead( SBufferReader* buf, size_t size ) { + const char* ret = buf->data + buf->pos; tbufSkip( buf, size ); return ret; } @@ -55,7 +55,7 @@ static size_t tbufReadLength( SBufferReader* buf ) { const char* tbufReadString( SBufferReader* buf, size_t* len ) { size_t l = tbufReadLength( buf ); - char* ret = buf->data + buf->pos; + const char* ret = buf->data + buf->pos; tbufSkip( buf, l + 1 ); if( ret[l] != 0 ) { THROW( TSDB_CODE_MEMORY_CORRUPTED ); @@ -80,7 +80,7 @@ size_t tbufReadToString( SBufferReader* buf, char* dst, size_t size ) { const char* tbufReadBinary( SBufferReader* buf, size_t *len ) { size_t l = tbufReadLength( buf ); - char* ret = buf->data + buf->pos; + const char* ret = buf->data + buf->pos; tbufSkip( buf, l ); if( len != NULL ) { *len = l; From 036695d45955d6d7f0a6112f613d40ac518db241 Mon Sep 17 00:00:00 2001 From: localvar Date: Mon, 27 Apr 2020 14:03:02 +0800 Subject: [PATCH 13/15] TD-153: make whole project compile --- src/client/inc/tscUtil.h | 3 +- src/client/src/tscSQLParser.c | 26 ++++- src/client/src/tscUtil.c | 8 +- src/query/inc/qast.h | 5 +- src/query/src/qast.c | 184 +++++++++++++++++----------------- src/query/tests/astTest.cpp | 18 ++-- src/tsdb/src/tsdbRead.c | 37 +++++-- src/util/inc/tbuffer.h | 3 +- 8 files changed, 157 insertions(+), 127 deletions(-) diff --git a/src/client/inc/tscUtil.h b/src/client/inc/tscUtil.h index d46c32d73d..718dfcf475 100644 --- a/src/client/inc/tscUtil.h +++ b/src/client/inc/tscUtil.h @@ -25,6 +25,7 @@ extern "C" { */ #include "os.h" #include "tbuffer.h" +#include "exception.h" #include "qextbuffer.h" #include "taosdef.h" #include "tscSecondaryMerge.h" @@ -177,7 +178,7 @@ bool tscValidateColumnId(STableMetaInfo* pTableMetaInfo, int32_t colId); // get starter position of metric query condition (query on tags) in SSqlCmd.payload SCond* tsGetSTableQueryCond(STagCond* pCond, uint64_t uid); -void tsSetSTableQueryCond(STagCond* pTagCond, uint64_t uid, SBuffer* pBuf); +void tsSetSTableQueryCond(STagCond* pTagCond, uint64_t uid, SBufferWriter* bw); void tscTagCondCopy(STagCond* dest, const STagCond* src); void tscTagCondRelease(STagCond* pCond); diff --git a/src/client/src/tscSQLParser.c b/src/client/src/tscSQLParser.c index 6e16606695..5590ac5a01 100644 --- a/src/client/src/tscSQLParser.c +++ b/src/client/src/tscSQLParser.c @@ -1185,10 +1185,18 @@ int32_t parseSelectClause(SSqlCmd* pCmd, int32_t clauseIndex, tSQLExprList* pSel return invalidSqlErrMsg(pQueryInfo->msg, "invalid arithmetic expression in select clause"); } - SBuffer buf = exprTreeToBinary(pNode); + SBufferWriter bw = tbufInitWriter(NULL, false); + + TRY(0) { + exprTreeToBinary(&bw, pNode); + } CATCH(code) { + tbufCloseWriter(&bw); + UNUSED(code); + // TODO: other error handling + } END_TRY - size_t len = tbufTell(&buf); - char* c = 
tbufGetData(&buf, true); + size_t len = tbufTell(&bw); + char* c = tbufGetData(&bw, true); // set the serialized binary string as the parameter of arithmetic expression addExprParams(pExpr, c, TSDB_DATA_TYPE_BINARY, len, index.tableIndex); @@ -3751,7 +3759,15 @@ static int32_t getTagQueryCondExpr(SQueryInfo* pQueryInfo, SCondExpr* pCondExpr, SArray* colList = taosArrayInit(10, sizeof(SColIndex)); ret = exprTreeFromSqlExpr(&p, p1, NULL, pQueryInfo, colList); - SBuffer buf = exprTreeToBinary(p); + SBufferWriter bw = tbufInitWriter(NULL, false); + + TRY(0) { + exprTreeToBinary(&bw, p); + } CATCH(code) { + tbufCloseWriter(&bw); + UNUSED(code); + // TODO: more error handling + } END_TRY // add to source column list STableMetaInfo* pTableMetaInfo = tscGetMetaInfo(pQueryInfo, i); @@ -3765,7 +3781,7 @@ static int32_t getTagQueryCondExpr(SQueryInfo* pQueryInfo, SCondExpr* pCondExpr, addRequiredTagColumn(pTableMetaInfo, &index); } - tsSetSTableQueryCond(&pQueryInfo->tagCond, uid, &buf); + tsSetSTableQueryCond(&pQueryInfo->tagCond, uid, &bw); doCompactQueryExpr(pExpr); tSQLExprDestroy(p1); diff --git a/src/client/src/tscUtil.c b/src/client/src/tscUtil.c index 6b8b2b38b4..88ce13e560 100644 --- a/src/client/src/tscUtil.c +++ b/src/client/src/tscUtil.c @@ -47,18 +47,18 @@ SCond* tsGetSTableQueryCond(STagCond* pTagCond, uint64_t uid) { return NULL; } -void tsSetSTableQueryCond(STagCond* pTagCond, uint64_t uid, SBuffer* pBuf) { - if (tbufTell(pBuf) == 0) { +void tsSetSTableQueryCond(STagCond* pTagCond, uint64_t uid, SBufferWriter* bw) { + if (tbufTell(bw) == 0) { return; } SCond cond = { .uid = uid, - .len = tbufTell(pBuf), + .len = tbufTell(bw), .cond = NULL, }; - cond.cond = tbufGetData(pBuf, true); + cond.cond = tbufGetData(bw, true); if (pTagCond->pCond == NULL) { pTagCond->pCond = taosArrayInit(3, sizeof(SCond)); diff --git a/src/query/inc/qast.h b/src/query/inc/qast.h index 903d54a18f..6c997d5a36 100644 --- a/src/query/inc/qast.h +++ b/src/query/inc/qast.h @@ -90,9 +90,10 @@ void tSQLBinaryExprTrv(tExprNode *pExprs, SArray* res); uint8_t getBinaryExprOptr(SSQLToken *pToken); -SBuffer exprTreeToBinary(tExprNode* pExprTree); +void tExprNodeDestroy(tExprNode *pNode, void (*fp)(void *)); +void exprTreeToBinary(SBufferWriter* bw, tExprNode* pExprTree); -tExprNode* exprTreeFromBinary(const void* pBuf, size_t size); +tExprNode* exprTreeFromBinary(const void* data, size_t size); tExprNode* exprTreeFromTableName(const char* tbnameCond); #ifdef __cplusplus diff --git a/src/query/src/qast.c b/src/query/src/qast.c index fdcbeeeac0..500a5f1e49 100644 --- a/src/query/src/qast.c +++ b/src/query/src/qast.c @@ -31,6 +31,7 @@ #include "tskiplist.h" #include "queryLog.h" #include "tsdbMain.h" +#include "exception.h" /* * @@ -44,7 +45,6 @@ * */ static tExprNode *tExprNodeCreate(SSchema *pSchema, int32_t numOfCols, SSQLToken *pToken); -static void tExprNodeDestroy(tExprNode *pNode, void (*fp)(void *)); static tExprNode *createSyntaxTree(SSchema *pSchema, int32_t numOfCols, char *str, int32_t *i); static void destroySyntaxTree(tExprNode *); @@ -428,7 +428,7 @@ void tSQLBinaryExprToString(tExprNode *pExpr, char *dst, int32_t *len) { static void UNUSED_FUNC destroySyntaxTree(tExprNode *pNode) { tExprNodeDestroy(pNode, NULL); } -static void tExprNodeDestroy(tExprNode *pNode, void (*fp)(void *)) { +void tExprNodeDestroy(tExprNode *pNode, void (*fp)(void *)) { if (pNode == NULL) { return; } @@ -1023,104 +1023,116 @@ void tSQLBinaryExprTrv(tExprNode *pExprs, SArray* res) { } } -static void exprTreeToBinaryImpl(tExprNode* 
pExprTree, SBuffer* pBuf) { - tbufWrite(pBuf, &pExprTree->nodeType, sizeof(pExprTree->nodeType)); +static void exprTreeToBinaryImpl(SBufferWriter* bw, tExprNode* expr) { + tbufWriteUint8(bw, expr->nodeType); - if (pExprTree->nodeType == TSQL_NODE_VALUE) { - tVariant* pVal = pExprTree->pVal; + if (expr->nodeType == TSQL_NODE_VALUE) { + tVariant* pVal = expr->pVal; - tbufWrite(pBuf, &pVal->nType, sizeof(pVal->nType)); + tbufWriteUint32(bw, pVal->nType); if (pVal->nType == TSDB_DATA_TYPE_BINARY) { - tbufWrite(pBuf, &pVal->nLen, sizeof(pVal->nLen)); - tbufWrite(pBuf, pVal->pz, pVal->nLen); + tbufWriteInt32(bw, pVal->nLen); + tbufWrite(bw, pVal->pz, pVal->nLen); } else { - tbufWrite(pBuf, &pVal->pz, sizeof(pVal->i64Key)); + tbufWriteInt64(bw, pVal->i64Key); } - } else if (pExprTree->nodeType == TSQL_NODE_COL) { - SSchema* pSchema = pExprTree->pSchema; - tbufWrite(pBuf, &pSchema->colId, sizeof(pSchema->colId)); - tbufWrite(pBuf, &pSchema->bytes, sizeof(pSchema->bytes)); - tbufWrite(pBuf, &pSchema->type, sizeof(pSchema->type)); + } else if (expr->nodeType == TSQL_NODE_COL) { + SSchema* pSchema = expr->pSchema; + tbufWriteInt16(bw, pSchema->colId); + tbufWriteInt16(bw, pSchema->bytes); + tbufWriteUint8(bw, pSchema->type); + tbufWriteString(bw, pSchema->name); - int32_t len = strlen(pSchema->name); - tbufWriteStringLen(pBuf, pSchema->name, len); - - } else if (pExprTree->nodeType == TSQL_NODE_EXPR) { - tbufWrite(pBuf, &pExprTree->_node.optr, sizeof(pExprTree->_node.optr)); - tbufWrite(pBuf, &pExprTree->_node.hasPK, sizeof(pExprTree->_node.hasPK)); - - exprTreeToBinaryImpl(pExprTree->_node.pLeft, pBuf); - exprTreeToBinaryImpl(pExprTree->_node.pRight, pBuf); + } else if (expr->nodeType == TSQL_NODE_EXPR) { + tbufWriteUint8(bw, expr->_node.optr); + tbufWriteUint8(bw, expr->_node.hasPK); + exprTreeToBinaryImpl(bw, expr->_node.pLeft); + exprTreeToBinaryImpl(bw, expr->_node.pRight); } } -SBuffer exprTreeToBinary(tExprNode* pExprTree) { - SBuffer buf = {0}; - if (pExprTree == NULL) { - return buf; +void exprTreeToBinary(SBufferWriter* bw, tExprNode* expr) { + if (expr != NULL) { + exprTreeToBinaryImpl(bw, expr); } - - int32_t code = tbufBeginWrite(&buf); - if (code != 0) { - return buf; - } - - exprTreeToBinaryImpl(pExprTree, &buf); - return buf; } -static tExprNode* exprTreeFromBinaryImpl(SBuffer* pBuf) { - tExprNode* pExpr = calloc(1, sizeof(tExprNode)); - pExpr->nodeType = tbufReadUint8(pBuf); +// TODO: these three functions should be made global +static void* exception_calloc(size_t nmemb, size_t size) { + void* p = calloc(nmemb, size); + if (p == NULL) { + THROW(TSDB_CODE_SERV_OUT_OF_MEMORY); + } + return p; +} + +static void* exception_malloc(size_t size) { + void* p = malloc(size); + if (p == NULL) { + THROW(TSDB_CODE_SERV_OUT_OF_MEMORY); + } + return p; +} + +static char* exception_strdup(const char* str) { + char* p = strdup(str); + if (p == NULL) { + THROW(TSDB_CODE_SERV_OUT_OF_MEMORY); + } + return p; +} + + +static tExprNode* exprTreeFromBinaryImpl(SBufferReader* br) { + int32_t anchor = CLEANUP_GET_ANCHOR(); + + tExprNode* pExpr = exception_calloc(1, sizeof(tExprNode)); + CLEANUP_PUSH_VOID_PTR_PTR(true, tExprNodeDestroy, pExpr, NULL); + + pExpr->nodeType = tbufReadUint8(br); if (pExpr->nodeType == TSQL_NODE_VALUE) { - tVariant* pVal = calloc(1, sizeof(tVariant)); - if (pVal == NULL) { - // TODO: - } + tVariant* pVal = exception_calloc(1, sizeof(tVariant)); pExpr->pVal = pVal; - pVal->nType = tbufReadUint32(pBuf); + pVal->nType = tbufReadUint32(br); if (pVal->nType == TSDB_DATA_TYPE_BINARY) { 
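      // Layout sketch (derived from exprTreeToBinaryImpl above): a binary value
      // node arrives as nodeType(uint8) nType(uint32) nLen(int32) followed by
      // exactly nLen payload bytes, so this branch reads nLen and then copies
      // nLen bytes into a freshly allocated, zero-terminated buffer.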
- tbufReadToBuffer(pBuf, &pVal->nLen, sizeof(pVal->nLen)); + tbufReadToBuffer(br, &pVal->nLen, sizeof(pVal->nLen)); pVal->pz = calloc(1, pVal->nLen + 1); - tbufReadToBuffer(pBuf, pVal->pz, pVal->nLen); + tbufReadToBuffer(br, pVal->pz, pVal->nLen); } else { - pVal->i64Key = tbufReadInt64(pBuf); + pVal->i64Key = tbufReadInt64(br); } } else if (pExpr->nodeType == TSQL_NODE_COL) { - SSchema* pSchema = calloc(1, sizeof(SSchema)); - if (pSchema == NULL) { - // TODO: - } + SSchema* pSchema = exception_calloc(1, sizeof(SSchema)); pExpr->pSchema = pSchema; - pSchema->colId = tbufReadInt16(pBuf); - pSchema->bytes = tbufReadInt16(pBuf); - pSchema->type = tbufReadUint8(pBuf); - tbufReadToString(pBuf, pSchema->name, TSDB_COL_NAME_LEN); + pSchema->colId = tbufReadInt16(br); + pSchema->bytes = tbufReadInt16(br); + pSchema->type = tbufReadUint8(br); + tbufReadToString(br, pSchema->name, TSDB_COL_NAME_LEN); } else if (pExpr->nodeType == TSQL_NODE_EXPR) { - pExpr->_node.optr = tbufReadUint8(pBuf); - pExpr->_node.hasPK = tbufReadUint8(pBuf); - pExpr->_node.pLeft = exprTreeFromBinaryImpl(pBuf); - pExpr->_node.pRight = exprTreeFromBinaryImpl(pBuf); + pExpr->_node.optr = tbufReadUint8(br); + pExpr->_node.hasPK = tbufReadUint8(br); + pExpr->_node.pLeft = exprTreeFromBinaryImpl(br); + pExpr->_node.pRight = exprTreeFromBinaryImpl(br); assert(pExpr->_node.pLeft != NULL && pExpr->_node.pRight != NULL); } + CLEANUP_EXECUTE_TO(anchor, false); return pExpr; } -tExprNode* exprTreeFromBinary(const void* pBuf, size_t size) { +tExprNode* exprTreeFromBinary(const void* data, size_t size) { if (size == 0) { return NULL; } - SBuffer rbuf = {0}; - tbufBeginRead(&rbuf, pBuf, size); - return exprTreeFromBinaryImpl(&rbuf); + SBufferReader br = tbufInitReader(data, size, false); + return exprTreeFromBinaryImpl(&br); } tExprNode* exprTreeFromTableName(const char* tbnameCond) { @@ -1128,23 +1140,18 @@ tExprNode* exprTreeFromTableName(const char* tbnameCond) { return NULL; } - tExprNode* expr = calloc(1, sizeof(tExprNode)); - if (expr == NULL) { - // TODO: - } + int32_t anchor = CLEANUP_GET_ANCHOR(); + + tExprNode* expr = exception_calloc(1, sizeof(tExprNode)); + CLEANUP_PUSH_VOID_PTR_PTR(true, tExprNodeDestroy, expr, NULL); + expr->nodeType = TSQL_NODE_EXPR; - tExprNode* left = calloc(1, sizeof(tExprNode)); - if (left == NULL) { - // TODO: - } + tExprNode* left = exception_calloc(1, sizeof(tExprNode)); expr->_node.pLeft = left; left->nodeType = TSQL_NODE_COL; - SSchema* pSchema = calloc(1, sizeof(SSchema)); - if (pSchema == NULL) { - // TODO: - } + SSchema* pSchema = exception_calloc(1, sizeof(SSchema)); left->pSchema = pSchema; pSchema->type = TSDB_DATA_TYPE_BINARY; @@ -1152,36 +1159,24 @@ tExprNode* exprTreeFromTableName(const char* tbnameCond) { strcpy(pSchema->name, TSQL_TBNAME_L); pSchema->colId = -1; - tExprNode* right = calloc(1, sizeof(tExprNode)); - if (right == NULL) { - // TODO - } + tExprNode* right = exception_calloc(1, sizeof(tExprNode)); expr->_node.pRight = right; if (strncmp(tbnameCond, QUERY_COND_REL_PREFIX_LIKE, QUERY_COND_REL_PREFIX_LIKE_LEN) == 0) { right->nodeType = TSQL_NODE_VALUE; expr->_node.optr = TSDB_RELATION_LIKE; - tVariant* pVal = calloc(1, sizeof(tVariant)); - if (pVal == NULL) { - // TODO: - } + tVariant* pVal = exception_calloc(1, sizeof(tVariant)); right->pVal = pVal; - pVal->nType = TSDB_DATA_TYPE_BINARY; size_t len = strlen(tbnameCond + QUERY_COND_REL_PREFIX_LIKE_LEN) + 1; - pVal->pz = malloc(len); - if (pVal->pz == NULL) { - // TODO: - } + pVal->pz = exception_malloc(len); memcpy(pVal->pz, 
tbnameCond + QUERY_COND_REL_PREFIX_LIKE_LEN, len); + pVal->nType = TSDB_DATA_TYPE_BINARY; pVal->nLen = (int32_t)len; } else if (strncmp(tbnameCond, QUERY_COND_REL_PREFIX_IN, QUERY_COND_REL_PREFIX_IN_LEN) == 0) { right->nodeType = TSQL_NODE_VALUE; expr->_node.optr = TSDB_RELATION_IN; - tVariant* pVal = calloc(1, sizeof(tVariant)); - if (pVal == NULL) { - // TODO: - } + tVariant* pVal = exception_calloc(1, sizeof(tVariant)); right->pVal = pVal; pVal->nType = TSDB_DATA_TYPE_ARRAY; pVal->arr = taosArrayInit(2, sizeof(char*)); @@ -1192,7 +1187,7 @@ tExprNode* exprTreeFromTableName(const char* tbnameCond) { cond = e + 1; } else if (*e == ',') { size_t len = e - cond + 1; - char* p = malloc( len ); + char* p = exception_malloc( len ); memcpy(p, cond, len); p[len - 1] = 0; cond += len; @@ -1201,12 +1196,13 @@ tExprNode* exprTreeFromTableName(const char* tbnameCond) { } if (*cond != 0) { - char* p = strdup( cond ); + char* p = exception_strdup( cond ); taosArrayPush(pVal->arr, &p); } taosArraySortString(pVal->arr); } + CLEANUP_EXECUTE_TO(anchor, false); return expr; } \ No newline at end of file diff --git a/src/query/tests/astTest.cpp b/src/query/tests/astTest.cpp index 6a78cfbe53..dee85ef630 100644 --- a/src/query/tests/astTest.cpp +++ b/src/query/tests/astTest.cpp @@ -550,11 +550,12 @@ tExprNode* createExpr2() { void exprSerializeTest1() { tExprNode* p1 = createExpr1(); - SBuffer buf = exprTreeToBinary(p1); + SBufferWriter bw = tbufInitWriter(NULL, false); + exprTreeToBinary(&bw, p1); - size_t size = tbufTell(&buf); + size_t size = tbufTell(&bw); ASSERT_TRUE(size > 0); - char* b = tbufGetData(&buf, false); + char* b = tbufGetData(&bw, false); tExprNode* p2 = exprTreeFromBinary(b, size); ASSERT_EQ(p1->nodeType, p2->nodeType); @@ -581,16 +582,17 @@ void exprSerializeTest1() { tExprTreeDestroy(&p1, nullptr); tExprTreeDestroy(&p2, nullptr); - tbufClose(&buf, false); + tbufClose(&bw); } void exprSerializeTest2() { tExprNode* p1 = createExpr2(); - SBuffer buf = exprTreeToBinary(p1); + SBufferWriter bw = tbufInitWriter(NULL, false); + exprTreeToBinary(&bw, p1); - size_t size = tbufTell(&buf); + size_t size = tbufTell(&bw); ASSERT_TRUE(size > 0); - char* b = tbufGetData(&buf, false); + char* b = tbufGetData(&bw, false); tExprNode* p2 = exprTreeFromBinary(b, size); ASSERT_EQ(p1->nodeType, p2->nodeType); @@ -625,7 +627,7 @@ void exprSerializeTest2() { tExprTreeDestroy(&p1, nullptr); tExprTreeDestroy(&p2, nullptr); - tbufClose(&buf, false); + tbufClose(&bw); } } // namespace TEST(testCase, astTest) { diff --git a/src/tsdb/src/tsdbRead.c b/src/tsdb/src/tsdbRead.c index eb35be5383..bc9220dbc7 100644 --- a/src/tsdb/src/tsdbRead.c +++ b/src/tsdb/src/tsdbRead.c @@ -18,6 +18,7 @@ #include "talgo.h" #include "tutil.h" #include "tcompare.h" +#include "exception.h" #include "../../../query/inc/qast.h" // todo move to common module #include "../../../query/inc/tlosertree.h" // todo move to util module @@ -1473,21 +1474,35 @@ int32_t tsdbQueryByTagsCond( } int32_t ret = TSDB_CODE_SUCCESS; + tExprNode* expr = NULL; - tExprNode* expr = exprTreeFromTableName(tbnameCond); - tExprNode* tagExpr = exprTreeFromBinary(pTagCond, len); - if (tagExpr != NULL) { + TRY(32) { + expr = exprTreeFromTableName(tbnameCond); if (expr == NULL) { - expr = tagExpr; + expr = exprTreeFromBinary(pTagCond, len); } else { - tExprNode* tbnameExpr = expr; - expr = calloc(1, sizeof(tExprNode)); - expr->nodeType = TSQL_NODE_EXPR; - expr->_node.optr = tagNameRelType; - expr->_node.pLeft = tagExpr; - expr->_node.pRight = tbnameExpr; + 
CLEANUP_PUSH_VOID_PTR_PTR(true, tExprNodeDestroy, expr, NULL); + tExprNode* tagExpr = exprTreeFromBinary(pTagCond, len); + if (tagExpr != NULL) { + CLEANUP_PUSH_VOID_PTR_PTR(true, tExprNodeDestroy, tagExpr, NULL); + tExprNode* tbnameExpr = expr; + expr = calloc(1, sizeof(tExprNode)); + if (expr == NULL) { + THROW( TSDB_CODE_SERV_OUT_OF_MEMORY ); + } + expr->nodeType = TSQL_NODE_EXPR; + expr->_node.optr = tagNameRelType; + expr->_node.pLeft = tagExpr; + expr->_node.pRight = tbnameExpr; + } } - } + CLEANUP_EXECUTE(); + + } CATCH( code ) { + CLEANUP_EXECUTE(); + ret = code; + // TODO: more error handling + } END_TRY doQueryTableList(pSTable, res, expr); pGroupInfo->numOfTables = taosArrayGetSize(res); diff --git a/src/util/inc/tbuffer.h b/src/util/inc/tbuffer.h index 8f3f7f777e..e2bdb815d7 100644 --- a/src/util/inc/tbuffer.h +++ b/src/util/inc/tbuffer.h @@ -66,8 +66,6 @@ int main( int argc, char** argv ) { printf( "you should not see this message.\n" ); } CATCH( code ) { printf( "exception code is: %d, you will see this message after print out 5 integers and a string.\n", code ); - // throw it again and the exception will be caught in main - THROW( code ); } END_TRY tbufCloseWriter( &bw ); @@ -92,6 +90,7 @@ typedef struct { //////////////////////////////////////////////////////////////////////////////// // common functions & macros for both reader & writer + #define tbufTell( buf ) ((buf)->pos) From 24cd95250ef1893338d367a37296d3a7d6374361 Mon Sep 17 00:00:00 2001 From: Shuduo Sang Date: Mon, 27 Apr 2020 17:21:04 +0800 Subject: [PATCH 14/15] add date.py and binary.py to 2.0 --- tests/pytest/fulltest.sh | 2 + tests/pytest/insert/binary.py | 68 ++++++++++++ tests/pytest/insert/date.py | 193 ++++++++++++++++++++++++++++++++++ tests/pytest/util/sql.py | 25 +++-- 4 files changed, 277 insertions(+), 11 deletions(-) create mode 100644 tests/pytest/insert/binary.py create mode 100644 tests/pytest/insert/date.py diff --git a/tests/pytest/fulltest.sh b/tests/pytest/fulltest.sh index 1b879b3cc6..ec06eb38ef 100755 --- a/tests/pytest/fulltest.sh +++ b/tests/pytest/fulltest.sh @@ -7,6 +7,8 @@ python3 ./test.py $1 -f insert/bool.py python3 ./test.py $1 -f insert/double.py python3 ./test.py $1 -f insert/smallint.py python3 ./test.py $1 -f insert/tinyint.py +python3 ./test.py $1 -f insert/date.py +python3 ./test.py $1 -f insert/binary.py python3 ./test.py $1 -f import_merge/importBlock1HO.py python3 ./test.py $1 -f import_merge/importBlock1HPO.py python3 ./test.py $1 -f import_merge/importBlock1H.py diff --git a/tests/pytest/insert/binary.py b/tests/pytest/insert/binary.py new file mode 100644 index 0000000000..9989865f96 --- /dev/null +++ b/tests/pytest/insert/binary.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- + +import sys +from util.log import * +from util.cases import * +from util.sql import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + tdSql.prepare() + + + tdLog.info('=============== step1') + tdLog.info('create table tb (ts timestamp, speed binary(5))') + tdSql.execute('create table tb (ts timestamp, speed binary(5))') + tdLog.info("insert into tb values (now, ) -x step1") + tdSql.error("insert into tb values (now, )") + tdLog.info('=============== step2') + tdLog.info("insert into tb values (now+1a, '1234')") + tdSql.execute("insert into tb values (now+1a, '1234')") + tdLog.info('select speed from tb order by ts desc') + tdSql.query('select speed from tb order by ts desc') + 
tdLog.info('tdSql.checkRow(1)') + tdSql.checkRows(1) + tdLog.info('tdSql.checkData(0, 0, 1234)') + tdSql.checkData(0, 0, 1234) + tdLog.info('=============== step3') + tdLog.info("insert into tb values (now+2a, '23456')") + tdSql.execute("insert into tb values (now+2a, '23456')") + tdLog.info('select speed from tb order by ts desc') + tdSql.query('select speed from tb order by ts desc') + tdLog.info('tdSql.checkRow(2)') + tdSql.checkRows(2) + tdLog.info('==> $data00') + tdLog.info('tdSql.checkData(0, 0, 23456)') + tdSql.checkData(0, 0, 23456) + tdLog.info('=============== step4') + tdLog.info("insert into tb values (now+3a, '345678')") + tdSql.error("insert into tb values (now+3a, '345678')") + tdLog.info("insert into tb values (now+3a, '34567')") + tdSql.execute("insert into tb values (now+3a, '34567')") + tdLog.info('select speed from tb order by ts desc') + tdSql.query('select speed from tb order by ts desc') + tdLog.info('tdSql.checkRow(3)') + tdSql.checkRows(3) + tdLog.info('==> $data00') + tdLog.info('tdSql.checkData(0, 0, 34567)') + tdSql.checkData(0, 0, 34567) + tdLog.info('drop database db') + tdSql.execute('drop database db') + tdLog.info('show databases') + tdSql.query('show databases') + tdLog.info('tdSql.checkRow(0)') + tdSql.checkRows(0) +# convert end + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/insert/date.py b/tests/pytest/insert/date.py new file mode 100644 index 0000000000..716d799cb0 --- /dev/null +++ b/tests/pytest/insert/date.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- + +import sys +from util.log import * +from util.cases import * +from util.sql import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + tdSql.prepare() + + # TSIM: system sh/stop_dnodes.sh + # TSIM: + # TSIM: system sh/ip.sh -i 1 -s up + # TSIM: system sh/deploy.sh -n dnode1 -m 192.168.0.1 -i 192.168.0.1 + # TSIM: system sh/cfg.sh -n dnode1 -c commitLog -v 0 + # TSIM: system sh/exec.sh -n dnode1 -s start + # TSIM: + # TSIM: sleep 3000 + # TSIM: sql connect + # TSIM: + # TSIM: $i = 0 + # TSIM: $dbPrefix = lm_da_db + # TSIM: $tbPrefix = lm_da_tb + # TSIM: $db = $dbPrefix . $i + # TSIM: $tb = $tbPrefix . 
$i + # TSIM: + # TSIM: print =============== step1 + tdLog.info('=============== step1') + # TSIM: sql create database $db + # TSIM: sql use $db + # TSIM: + # TSIM: sql create table $tb (ts timestamp, speed int) + tdLog.info("create table tb0 (ts timestamp, speed int)") + tdSql.execute('create table tb0 (ts timestamp, speed int)') + # TSIM: sql insert into $tb values ('2017-01-01 08:00:00.001', 1) + tdLog.info("insert into tb0 values ('2017-01-01 08:00:00.001', 1)") + tdSql.execute("insert into tb0 values ('2017-01-01 08:00:00.001', 1)") + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: if $rows != 1 then + tdLog.info('tdSql.checkRow(1)') + tdSql.checkRows(1) + # TSIM: return -1 + # TSIM: endi + # TSIM: if $data00 != @17-01-01 08:00:00.001@ then + tdLog.info("tdSql.checkData(0, 0, 17-01-01 08:00:00.001)") + expectedData = datetime.datetime.strptime( + "17-01-01 08:00:00.001", "%y-%m-%d %H:%M:%S.%f") + tdSql.checkData(0, 0, expectedData) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step2 + tdLog.info('=============== step2') + # TSIM: sql insert into $tb values ('2017-08-28 00:23:46.429+ 1a', 2) + tdLog.info("insert into tb0 values ('2017-08-28 00:23:46.429+ 1a', 2)") + tdSql.execute( + "insert into tb0 values ('2017-08-28 00:23:46.429+ 1a', 2)") + # TSIM: #sql insert into $tb values ('2017-08-28 00:23:46cd .429', 2) + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: if $rows != 2 then + tdLog.info('tdSql.checkRow(2)') + tdSql.checkRows(2) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step3 + tdLog.info('=============== step3') + # TSIM: #sql insert into $tb values ('1970-01-01 08:00:00.000', 3) + # TSIM: #sql insert into $tb values ('1970-01-01 08:00:00.000', 3) + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: if $rows != 2 then + tdLog.info('tdSql.checkRow(2)') + tdSql.checkRows(2) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step4 + tdLog.info('=============== step4') + # TSIM: sql insert into $tb values(now, 4); + tdLog.info("insert into tb0 values(now, 4);") + tdSql.execute("insert into tb0 values(now, 4);") + # TSIM: sql insert into $tb values(now+1a, 5); + tdLog.info("insert into tb0 values(now+1a, 5);") + tdSql.execute("insert into tb0 values(now+1a, 5);") + # TSIM: sql insert into $tb values(now+1s, 6); + tdLog.info("insert into tb0 values(now+1s, 6);") + tdSql.execute("insert into tb0 values(now+1s, 6);") + # TSIM: sql insert into $tb values(now+1m, 7); + tdLog.info("insert into tb0 values(now+1m, 7);") + tdSql.execute("insert into tb0 values(now+1m, 7);") + # TSIM: sql insert into $tb values(now+1h, 8); + tdLog.info("insert into tb0 values(now+1h, 8);") + tdSql.execute("insert into tb0 values(now+1h, 8);") + # TSIM: sql insert into $tb values(now+1d, 9); + tdLog.info("insert into tb0 values(now+1d, 9);") + tdSql.execute("insert into tb0 values(now+1d, 9);") + # TSIM: sql_error insert into $tb values(now+3w, 10); + tdLog.info("insert into tb0 values(now+3w, 10);") + tdSql.error("insert into tb0 values(now+3w, 10);") + # TSIM: sql_error insert into $tb values(now+1n, 11); + tdLog.info("insert into tb0 values(now+1n, 11);") + tdSql.error("insert into tb0 values(now+1n, 11);") + # TSIM: sql_error insert into $tb values(now+1y, 12); + tdLog.info("insert into tb0 values(now+1y, 12);") + 
tdSql.error("insert into tb0 values(now+1y, 12);") + # TSIM: + # TSIM: print =============== step5 + tdLog.info('=============== step5') + # TSIM: sql_error insert into $tb values ('9999-12-31 213:59:59.999', + # 13) + tdLog.info("insert into tb0 values ('9999-12-31 213:59:59.999', 13)") + tdSql.error("insert into tb0 values ('9999-12-31 213:59:59.999', 13)") + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: print $rows + tdLog.info('$rows') + # TSIM: if $rows != 8 then + tdLog.info('tdSql.checkRow(8)') + tdSql.checkRows(8) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step6 + tdLog.info('=============== step6') + # TSIM: sql_error insert into $tb values ('9999-12-99 23:59:59.999', + # 13) + tdLog.info("insert into tb0 values ('9999-12-99 23:59:59.999', 13)") + tdSql.error("insert into tb0 values ('9999-12-99 23:59:59.999', 13)") + # TSIM: + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: if $rows != 8 then + tdLog.info('tdSql.checkRow(8)') + tdSql.checkRows(8) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step7 + tdLog.info('=============== step7') + # TSIM: $i = 1 + # TSIM: $tb = $tbPrefix . $i + # TSIM: sql create table $tb (ts timestamp, ts2 timestamp) + tdLog.info("create table tb1 (ts timestamp, ts2 timestamp)") + tdSql.execute('create table tb1 (ts timestamp, ts2 timestamp)') + # TSIM: + # TSIM: print =============== step8 + tdLog.info('=============== step8') + # TSIM: sql insert into $tb values (now, now) + tdLog.info("insert into tb1 values (now, now)") + tdSql.execute("insert into tb1 values (now, now)") + # TSIM: sql select * from $tb + tdLog.info('select * from tb1') + tdSql.query('select * from tb1') + # TSIM: if $rows != 1 then + tdLog.info('tdSql.checkRow(1)') + tdSql.checkRows(1) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: sql drop database $db + tdLog.info('drop database db') + tdSql.execute('drop database db') + # TSIM: sql show databases + tdLog.info('show databases') + tdSql.query('show databases') + # TSIM: if $rows != 0 then + tdLog.info('tdSql.checkRow(0)') + tdSql.checkRows(0) + # TSIM: return -1 + # TSIM: endi +# convert end + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/util/sql.py b/tests/pytest/util/sql.py index 15567ec3ca..a1f7dd2f64 100644 --- a/tests/pytest/util/sql.py +++ b/tests/pytest/util/sql.py @@ -44,7 +44,7 @@ class TDSql: except BaseException: expectErrNotOccured = False if expectErrNotOccured: - tdLog.exit("sql:%.40s, expect error not occured" % (sql)) + tdLog.exit("failed: sql:%.40s, expect error not occured" % (sql)) else: tdLog.info("sql:%.40s, expect error occured" % (sql)) @@ -71,28 +71,31 @@ class TDSql: def checkData(self, row, col, data): if row < 0: tdLog.exit( - "sql:%.40s, row:%d is smaller than zero" % + "failed: sql:%.40s, row:%d is smaller than zero" % (self.sql, row)) if col < 0: tdLog.exit( - "sql:%.40s, col:%d is smaller than zero" % + "failed: sql:%.40s, col:%d is smaller than zero" % (self.sql, col)) if row >= self.queryRows: tdLog.exit( - "sql:%.40s, row:%d is larger than queryRows:%d" % + "failed: sql:%.40s, row:%d is larger than queryRows:%d" % (self.sql, row, self.queryRows)) if col >= self.queryCols: tdLog.exit( - "sql:%.40s, col:%d is larger than queryRows:%d" % + 
"failed: sql:%.40s, col:%d is larger than queryRows:%d" % (self.sql, col, self.queryCols)) if self.queryResult[row][col] != data: tdLog.exit( - "sql:%.40s row:%d col:%d data:%s != expect:%s" % + "failed: sql:%.40s row:%d col:%d data:%s != expect:%s" % (self.sql, row, col, self.queryResult[row][col], data)) if data is None: tdLog.info("sql:%.40s, row:%d col:%d data:%s == expect:%s" % (self.sql, row, col, self.queryResult[row][col], data)) + elif isinstance(data, datetime.date): + tdLog.info("sql:%.40s, row:%d col:%d data:%s == expect:%s" % + (self.sql, row, col, self.queryResult[row][col], data)) else: tdLog.info("sql:%.40s, row:%d col:%d data:%s == expect:%d" % (self.sql, row, col, self.queryResult[row][col], data)) @@ -100,19 +103,19 @@ class TDSql: def getData(self, row, col): if row < 0: tdLog.exit( - "sql:%.40s, row:%d is smaller than zero" % + "failed: sql:%.40s, row:%d is smaller than zero" % (self.sql, row)) if col < 0: tdLog.exit( - "sql:%.40s, col:%d is smaller than zero" % + "failed: sql:%.40s, col:%d is smaller than zero" % (self.sql, col)) if row >= self.queryRows: tdLog.exit( - "sql:%.40s, row:%d is larger than queryRows:%d" % + "failed: sql:%.40s, row:%d is larger than queryRows:%d" % (self.sql, row, self.queryRows)) if col >= self.queryCols: tdLog.exit( - "sql:%.40s, col:%d is larger than queryRows:%d" % + "failed: sql:%.40s, col:%d is larger than queryRows:%d" % (self.sql, col, self.queryCols)) return self.queryResult[row][col] @@ -131,7 +134,7 @@ class TDSql: def checkAffectedRows(self, expectAffectedRows): if self.affectedRows != expectAffectedRows: - tdLog.exit("sql:%.40s, affectedRows:%d != expect:%d" % + tdLog.exit("failed: sql:%.40s, affectedRows:%d != expect:%d" % (self.sql, self.affectedRows, expectAffectedRows)) tdLog.info("sql:%.40s, affectedRows:%d == expect:%d" % (self.sql, self.affectedRows, expectAffectedRows)) From e8f33da26f1ffd45eeea79377a518ae2c8c70b5f Mon Sep 17 00:00:00 2001 From: slguan Date: Mon, 27 Apr 2020 17:49:22 +0800 Subject: [PATCH 15/15] [TD-184] handle bugs during wal synchronization --- src/vnode/src/vnodeWrite.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/vnode/src/vnodeWrite.c b/src/vnode/src/vnodeWrite.c index 6007379680..cfcc9cd847 100644 --- a/src/vnode/src/vnodeWrite.c +++ b/src/vnode/src/vnodeWrite.c @@ -58,7 +58,7 @@ int32_t vnodeProcessWrite(void *param1, int qtype, void *param2, void *item) { return TSDB_CODE_NOT_ACTIVE_VNODE; if (pVnode->syncCfg.replica > 1 && pVnode->role != TAOS_SYNC_ROLE_MASTER) - return TSDB_CODE_NO_MASTER; + return TSDB_CODE_NOT_READY; // assign version pVnode->version++;