Merge pull request #4379 from taosdata/xiaoping/add_test_case2
[TD-2109] <test> add test case
This commit is contained in:
commit d782a83b9d
@@ -0,0 +1,47 @@
import taos
import datetime
import random
import multiprocessing

def taos_excute(table, connect_host):
    conn = taos.connect(host=connect_host, user="root", password="taosdata", config="/etc/taos", database='test')
    cursor = conn.cursor()
    for i in range(1000000):
        pk = random.randint(100001, 300000)
        time_now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
        col1 = random.randint(1, 10000)
        col2 = random.randint(1, 10000)
        col3 = random.randint(1, 10000)
        col4 = random.randint(1, 10000)
        col5 = random.randint(1, 10000)
        col6 = random.randint(1, 10000)
        sql = f"INSERT INTO {table}_{pk} USING {table} TAGS ({pk}) VALUES ('{time_now}', {col1}, {col2}, {col3}, {col4}, {col5}, {col6})"
        cursor.execute(sql)
    cursor.close()
    conn.close()

def taos_init(table, connect_host, pk):
    conn = taos.connect(host=connect_host, user="root", password="taosdata", config="/etc/taos", database='test')
    cursor = conn.cursor()
    sql = f"CREATE TABLE {table}_{pk} USING {table} TAGS ({pk})"
    cursor.execute(sql)
    cursor.close()
    conn.close()

print("init time:", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

connect_list = ["node1", "node2", "node3", "node4", "node5"]
pool = multiprocessing.Pool(processes=108)

for pk in range(100001, 300000):
    pool.apply_async(func=taos_init, args=("test", connect_list[pk % 5], pk,))

print("start time:", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

for i in range(10000):
    pool.apply_async(func=taos_excute, args=("test", connect_list[i % 5],))

pool.close()
pool.join()

print("end time:", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
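The stress script above never creates the database or the super table it writes to: it connects to database 'test' and inserts sub-tables with "INSERT INTO test_{pk} USING test TAGS ({pk})", so a super table named test with one timestamp column, six INT columns and a single INT tag is assumed to already exist on every node. Below is a minimal setup sketch under those assumptions; the column and tag names (ts, col1..col6, pk) and the host are illustrative only, not taken from the commit.

    import taos

    # One-time setup assumed by the stress script (hypothetical column/tag names).
    conn = taos.connect(host="node1", user="root", password="taosdata", config="/etc/taos")
    cursor = conn.cursor()
    cursor.execute("CREATE DATABASE IF NOT EXISTS test")
    cursor.execute("USE test")
    # Super table: one timestamp plus six INT columns, one INT tag used as the sub-table key.
    cursor.execute("CREATE TABLE IF NOT EXISTS test (ts TIMESTAMP, col1 INT, col2 INT, col3 INT, "
                   "col4 INT, col5 INT, col6 INT) TAGS (pk INT)")
    cursor.close()
    conn.close()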
@@ -0,0 +1,71 @@
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################

# -*- coding: utf-8 -*-

import sys
import taos
from util.log import tdLog
from util.cases import tdCases
from util.sql import tdSql
import time
import datetime
import csv
import random
import pandas as pd


class TDTestCase:
    def init(self, conn, logSql):
        tdLog.debug("start to execute %s" % __file__)
        tdSql.init(conn.cursor(), logSql)

        self.ts = 1500074556514

    def writeCSV(self):
        with open('test3.csv', 'w', encoding='utf-8', newline='') as csvFile:
            writer = csv.writer(csvFile, dialect='excel')
            for i in range(1000000):
                newTimestamp = self.ts + random.randint(10000000, 10000000000) + random.randint(1000, 10000000) + random.randint(1, 1000)
                d = datetime.datetime.fromtimestamp(newTimestamp / 1000)
                dt = str(d.strftime("%Y-%m-%d %H:%M:%S.%f"))
                writer.writerow(["'%s'" % dt, random.randint(1, 100), random.uniform(1, 100), random.randint(1, 100), random.randint(1, 100)])

    def removCSVHeader(self):
        data = pd.read_csv("ordered.csv")
        data = data.drop([0])
        data.to_csv("ordered.csv", header=False, index=False)

    def run(self):
        tdSql.prepare()

        tdSql.execute("create table t1(ts timestamp, c1 int, c2 float, c3 int, c4 int)")
        startTime = time.time()
        tdSql.execute("insert into t1 file 'outoforder.csv'")
        duration = time.time() - startTime
        print("Out of Order - Insert time: %d" % duration)
        tdSql.query("select count(*) from t1")
        rows = tdSql.getData(0, 0)

        tdSql.execute("create table t2(ts timestamp, c1 int, c2 float, c3 int, c4 int)")
        startTime = time.time()
        tdSql.execute("insert into t2 file 'ordered.csv'")
        duration = time.time() - startTime
        print("Ordered - Insert time: %d" % duration)
        tdSql.query("select count(*) from t2")
        tdSql.checkData(0, 0, rows)

    def stop(self):
        tdSql.close()
        tdLog.success("%s successfully executed" % __file__)

tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
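The run() method above imports 'outoforder.csv' and 'ordered.csv', but the case itself only generates 'test3.csv' (writeCSV) and strips the header row of 'ordered.csv' (removCSVHeader); the two input files are evidently prepared beforehand. A hedged sketch of one way to derive them from the generated data follows — the column names and the reuse of test3.csv are assumptions, not part of the commit.

    import pandas as pd

    # Hypothetical preparation step: derive the two CSV files that run() expects
    # from the randomly timestamped rows produced by writeCSV() (assumed column names).
    cols = ["ts", "c1", "c2", "c3", "c4"]
    data = pd.read_csv("test3.csv", names=cols)

    # The generated timestamps are random, so the raw file already serves as the
    # out-of-order input.
    data.to_csv("outoforder.csv", header=False, index=False)

    # Sorting on the timestamp column yields the ordered input.
    data.sort_values("ts").to_csv("ordered.csv", header=False, index=False)

Because writeCSV emits the timestamps in a fixed-width "%Y-%m-%d %H:%M:%S.%f" format, sorting the quoted strings lexicographically is equivalent to sorting them chronologically.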