Modify importSampleData (#5915)

* Update main.go

Modify the code according to GoLand inspection conventions

* Update import_config.go

* Add go.mod
张金富 2021-04-25 21:59:24 +08:00 committed by GitHub
parent 708dfee8c7
commit cb220d6572
5 changed files with 274 additions and 338 deletions

View File

@ -97,7 +97,7 @@ go build -o bin/taosimport app/main.go
Whether to save statistics into TDengine's statistic table: 1 yes, 0 no. Default 0.
* -savetb int
* -savetb string
Table name used to save statistics when save is 1. Default statistic.
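
For reference, a minimal invocation exercising these two flags might look like the following (the binary path comes from the build command above; all values are illustrative):

./bin/taosimport -cfg config/cfg.toml -cases sensor_info -save 1 -savetb statistic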

View File

@ -7,7 +7,6 @@ import (
"encoding/json"
"flag"
"fmt"
"hash/crc32"
"io"
"log"
"os"
@ -17,47 +16,55 @@ import (
"sync"
"time"
dataimport "github.com/taosdata/TDengine/importSampleData/import"
dataImport "github.com/taosdata/TDengine/importSampleData/import"
_ "github.com/taosdata/driver-go/taosSql"
)
const (
TIMESTAMP = "timestamp"
DATETIME = "datetime"
MILLISECOND = "millisecond"
DEFAULT_STARTTIME int64 = -1
DEFAULT_INTERVAL int64 = 1 * 1000
DEFAULT_DELAY int64 = -1
DEFAULT_STATISTIC_TABLE = "statistic"
// the primary key type must be timestamp
TIMESTAMP = "timestamp"
JSON_FORMAT = "json"
CSV_FORMAT = "csv"
SUPERTABLE_PREFIX = "s_"
SUBTABLE_PREFIX = "t_"
// whether the primary key time field in the sample data is in millisecond or dateTime format
DATETIME = "datetime"
MILLISECOND = "millisecond"
DRIVER_NAME = "taosSql"
STARTTIME_LAYOUT = "2006-01-02 15:04:05.000"
INSERT_PREFIX = "insert into "
DefaultStartTime int64 = -1
DefaultInterval int64 = 1 * 1000 // time interval between imported records; only takes effect when auto=1, otherwise the interval is calculated from the sample data; unit millisecond, default 1000
DefaultDelay int64 = -1 //
// table name used to save statistics when save is 1; default statistic
DefaultStatisticTable = "statistic"
// sample data file format: json or csv
JsonFormat = "json"
CsvFormat = "csv"
SuperTablePrefix = "s_" // 超级表前缀
SubTablePrefix = "t_" // 子表前缀
DriverName = "taosSql"
StartTimeLayout = "2006-01-02 15:04:05.000"
InsertPrefix = "insert into "
)
var (
cfg string
cases string
hnum int
vnum int
thread int
batch int
auto int
starttimestr string
interval int64
host string
port int
user string
password string
dropdb int
db string
dbparam string
cfg string // path of the import configuration file describing the sample data files and the corresponding TDengine settings; defaults to config/cfg.toml
cases string // names of the use cases to import, listed under [usecase] in the file given by -cfg; multiple cases can be imported at once, separated by commas, e.g. sensor_info,camera_detection; default sensor_info
hnum int // horizontal scaling factor of the sample data; if the sample data contains one sub table t_0 and hnum is 2, two scaled sub tables are created from the original table name; default 100
vnum int // number of times the sample data is replicated along the time axis; 0 means keep importing at the given interval after the historical data reaches the current time; default 1000
thread int // number of threads used for importing; default 10
batch int // number of records in one write operation (batch size); default 100
auto int // whether to auto-generate the primary key timestamps of the sample data: 1 yes, 0 no; default 0
startTimeStr string // start time of the imported records, in the format "yyyy-MM-dd HH:mm:ss.SSS"; if unset, the earliest timestamp in the sample data is used, otherwise the sample timestamps are ignored and importing starts from the given start; required when auto is 1; default empty
interval int64 // time interval between imported records; only takes effect when auto=1, otherwise the interval is calculated from the sample data; unit millisecond, default 1000
host string // IP of the target TDengine server; default 127.0.0.1
port int // port of the target TDengine server; default 6030
user string // TDengine user name; default root
password string // TDengine user password; default taosdata
dropdb int // whether to drop the database before importing: 1 yes, 0 no; default 0
db string // name of the target TDengine database; default test_yyyyMMdd
dbparam string // optional parameters applied when the database does not exist and is created automatically, e.g. days 10 cache 16000 ablocks 4; default empty
dataSourceName string
startTime int64
@ -72,10 +79,10 @@ var (
lastStaticTime time.Time
lastTotalRows int64
timeTicker *time.Ticker
delay int64 // default 10 milliseconds
tick int64
save int
saveTable string
delay int64 // interval of continuous importing when vnum is 0; defaults to half of the minimum record interval among all cases, in ms
tick int64 // interval for printing statistics; default 2000 ms
save int // whether to save statistics into TDengine's statistic table: 1 yes, 0 no; default 0
saveTable string // table name used to save statistics when save is 1; default statistic
)
type superTableConfig struct {
@ -83,7 +90,7 @@ type superTableConfig struct {
endTime int64
cycleTime int64
avgInterval int64
config dataimport.CaseConfig
config dataImport.CaseConfig
}
type scaleTableInfo struct {
@ -92,14 +99,14 @@ type scaleTableInfo struct {
insertRows int64
}
type tableRows struct {
tableName string // tableName
value string // values(...)
}
//type tableRows struct {
// tableName string // tableName
// value string // values(...)
//}
type dataRows struct {
rows []map[string]interface{}
config dataimport.CaseConfig
config dataImport.CaseConfig
}
func (rows dataRows) Len() int {
@ -107,9 +114,9 @@ func (rows dataRows) Len() int {
}
func (rows dataRows) Less(i, j int) bool {
itime := getPrimaryKey(rows.rows[i][rows.config.Timestamp])
jtime := getPrimaryKey(rows.rows[j][rows.config.Timestamp])
return itime < jtime
iTime := getPrimaryKey(rows.rows[i][rows.config.Timestamp])
jTime := getPrimaryKey(rows.rows[j][rows.config.Timestamp])
return iTime < jTime
}
func (rows dataRows) Swap(i, j int) {
@ -123,26 +130,26 @@ func getPrimaryKey(value interface{}) int64 {
}
func init() {
parseArg() //parse argument
parseArg() // parse argument
if db == "" {
//db = "go"
// target TDengine database name; defaults to test_yyyyMMdd
db = fmt.Sprintf("test_%s", time.Now().Format("20060102"))
}
if auto == 1 && len(starttimestr) == 0 {
if auto == 1 && len(startTimeStr) == 0 {
log.Fatalf("startTime must be set when auto is 1, the format is \"yyyy-MM-dd HH:mm:ss.SSS\" ")
}
if len(starttimestr) != 0 {
t, err := time.ParseInLocation(STARTTIME_LAYOUT, strings.TrimSpace(starttimestr), time.Local)
if len(startTimeStr) != 0 {
t, err := time.ParseInLocation(StartTimeLayout, strings.TrimSpace(startTimeStr), time.Local)
if err != nil {
log.Fatalf("param startTime %s error, %s\n", starttimestr, err)
log.Fatalf("param startTime %s error, %s\n", startTimeStr, err)
}
startTime = t.UnixNano() / 1e6 // as millisecond
} else {
startTime = DEFAULT_STARTTIME
startTime = DefaultStartTime
}
dataSourceName = fmt.Sprintf("%s:%s@/tcp(%s:%d)/", user, password, host, port)
@ -154,9 +161,9 @@ func init() {
func main() {
importConfig := dataimport.LoadConfig(cfg)
importConfig := dataImport.LoadConfig(cfg)
var caseMinumInterval int64 = -1
var caseMinInterval int64 = -1
for _, userCase := range strings.Split(cases, ",") {
caseConfig, ok := importConfig.UserCases[userCase]
@ -168,7 +175,7 @@ func main() {
checkUserCaseConfig(userCase, &caseConfig)
//read file as map array
// read file as map array
fileRows := readFile(caseConfig)
log.Printf("case [%s] sample data file contains %d rows.\n", userCase, len(fileRows.rows))
@ -177,31 +184,31 @@ func main() {
continue
}
_, exists := superTableConfigMap[caseConfig.Stname]
_, exists := superTableConfigMap[caseConfig.StName]
if !exists {
superTableConfigMap[caseConfig.Stname] = &superTableConfig{config: caseConfig}
superTableConfigMap[caseConfig.StName] = &superTableConfig{config: caseConfig}
} else {
log.Fatalf("the stname of case %s already exist.\n", caseConfig.Stname)
log.Fatalf("the stname of case %s already exist.\n", caseConfig.StName)
}
var start, cycleTime, avgInterval int64 = getSuperTableTimeConfig(fileRows)
// set super table's startTime, cycleTime and avgInterval
superTableConfigMap[caseConfig.Stname].startTime = start
superTableConfigMap[caseConfig.Stname].avgInterval = avgInterval
superTableConfigMap[caseConfig.Stname].cycleTime = cycleTime
superTableConfigMap[caseConfig.StName].startTime = start
superTableConfigMap[caseConfig.StName].cycleTime = cycleTime
superTableConfigMap[caseConfig.StName].avgInterval = avgInterval
if caseMinumInterval == -1 || caseMinumInterval > avgInterval {
caseMinumInterval = avgInterval
if caseMinInterval == -1 || caseMinInterval > avgInterval {
caseMinInterval = avgInterval
}
startStr := time.Unix(0, start*int64(time.Millisecond)).Format(STARTTIME_LAYOUT)
startStr := time.Unix(0, start*int64(time.Millisecond)).Format(StartTimeLayout)
log.Printf("case [%s] startTime %s(%d), average dataInterval %d ms, cycleTime %d ms.\n", userCase, startStr, start, avgInterval, cycleTime)
}
if DEFAULT_DELAY == delay {
if DefaultDelay == delay {
// default delay
delay = caseMinumInterval / 2
delay = caseMinInterval / 2
if delay < 1 {
delay = 1
}
@ -218,7 +225,7 @@ func main() {
createSuperTable(superTableConfigMap)
log.Printf("create %d superTable ,used %d ms.\n", superTableNum, time.Since(start)/1e6)
//create sub table
// create sub table
start = time.Now()
createSubTable(subTableMap)
log.Printf("create %d times of %d subtable ,all %d tables, used %d ms.\n", hnum, len(subTableMap), len(scaleTableMap), time.Since(start)/1e6)
@ -278,7 +285,7 @@ func staticSpeed() {
defer connection.Close()
if save == 1 {
connection.Exec("use " + db)
_, _ = connection.Exec("use " + db)
_, err := connection.Exec("create table if not exists " + saveTable + "(ts timestamp, speed int)")
if err != nil {
log.Fatalf("create %s Table error: %s\n", saveTable, err)
@ -294,12 +301,12 @@ func staticSpeed() {
total := getTotalRows(successRows)
currentSuccessRows := total - lastTotalRows
speed := currentSuccessRows * 1e9 / int64(usedTime)
speed := currentSuccessRows * 1e9 / usedTime
log.Printf("insert %d rows, used %d ms, speed %d rows/s", currentSuccessRows, usedTime/1e6, speed)
if save == 1 {
insertSql := fmt.Sprintf("insert into %s values(%d, %d)", saveTable, currentTime.UnixNano()/1e6, speed)
connection.Exec(insertSql)
_, _ = connection.Exec(insertSql)
}
lastStaticTime = currentTime
@ -327,12 +334,13 @@ func getSuperTableTimeConfig(fileRows dataRows) (start, cycleTime, avgInterval i
} else {
// use the sample data primary timestamp
sort.Sort(fileRows) // sort the file data by the primarykey
sort.Sort(fileRows) // sort the file data by the primaryKey
minTime := getPrimaryKey(fileRows.rows[0][fileRows.config.Timestamp])
maxTime := getPrimaryKey(fileRows.rows[len(fileRows.rows)-1][fileRows.config.Timestamp])
start = minTime // default startTime use the minTime
if DEFAULT_STARTTIME != startTime {
// if a start time was specified, use it instead of the sample minimum
if DefaultStartTime != startTime {
start = startTime
}
@ -350,31 +358,21 @@ func getSuperTableTimeConfig(fileRows dataRows) (start, cycleTime, avgInterval i
return
}
func createStatisticTable() {
connection := getConnection()
defer connection.Close()
_, err := connection.Exec("create table if not exist " + db + "." + saveTable + "(ts timestamp, speed int)")
if err != nil {
log.Fatalf("createStatisticTable error: %s\n", err)
}
}
func createSubTable(subTableMaps map[string]*dataRows) {
connection := getConnection()
defer connection.Close()
connection.Exec("use " + db)
_, _ = connection.Exec("use " + db)
createTablePrefix := "create table if not exists "
var buffer bytes.Buffer
for subTableName := range subTableMaps {
superTableName := getSuperTableName(subTableMaps[subTableName].config.Stname)
tagValues := subTableMaps[subTableName].rows[0] // the first rows values as tags
superTableName := getSuperTableName(subTableMaps[subTableName].config.StName)
firstRowValues := subTableMaps[subTableName].rows[0] // the first rows values as tags
buffers := bytes.Buffer{}
// create table t using supertTable tags(...);
// create table t using superTable tags(...);
for i := 0; i < hnum; i++ {
tableName := getScaleSubTableName(subTableName, i)
@ -384,21 +382,21 @@ func createSubTable(subTableMaps map[string]*dataRows) {
}
scaleTableNames = append(scaleTableNames, tableName)
buffers.WriteString(createTablePrefix)
buffers.WriteString(tableName)
buffers.WriteString(" using ")
buffers.WriteString(superTableName)
buffers.WriteString(" tags(")
buffer.WriteString(createTablePrefix)
buffer.WriteString(tableName)
buffer.WriteString(" using ")
buffer.WriteString(superTableName)
buffer.WriteString(" tags(")
for _, tag := range subTableMaps[subTableName].config.Tags {
tagValue := fmt.Sprintf("%v", tagValues[strings.ToLower(tag.Name)])
buffers.WriteString("'" + tagValue + "'")
buffers.WriteString(",")
tagValue := fmt.Sprintf("%v", firstRowValues[strings.ToLower(tag.Name)])
buffer.WriteString("'" + tagValue + "'")
buffer.WriteString(",")
}
buffers.Truncate(buffers.Len() - 1)
buffers.WriteString(")")
buffer.Truncate(buffer.Len() - 1)
buffer.WriteString(")")
createTableSql := buffers.String()
buffers.Reset()
createTableSql := buffer.String()
buffer.Reset()
//log.Printf("create table: %s\n", createTableSql)
_, err := connection.Exec(createTableSql)
@ -420,7 +418,7 @@ func createSuperTable(superTableConfigMap map[string]*superTableConfig) {
if err != nil {
log.Fatalf("drop database error: %s\n", err)
}
log.Printf("dropDb: %s\n", dropDbSql)
log.Printf("dropdb: %s\n", dropDbSql)
}
createDbSql := "create database if not exists " + db + " " + dbparam
@ -431,7 +429,7 @@ func createSuperTable(superTableConfigMap map[string]*superTableConfig) {
}
log.Printf("createDb: %s\n", createDbSql)
connection.Exec("use " + db)
_, _ = connection.Exec("use " + db)
prefix := "create table if not exists "
var buffer bytes.Buffer
@ -464,7 +462,7 @@ func createSuperTable(superTableConfigMap map[string]*superTableConfig) {
createSql := buffer.String()
buffer.Reset()
//log.Printf("supertable: %s\n", createSql)
//log.Printf("superTable: %s\n", createSql)
_, err = connection.Exec(createSql)
if err != nil {
log.Fatalf("create supertable error: %s\n", err)
@ -473,15 +471,15 @@ func createSuperTable(superTableConfigMap map[string]*superTableConfig) {
}
func getScaleSubTableName(subTableName string, hnum int) string {
if hnum == 0 {
func getScaleSubTableName(subTableName string, hNum int) string {
if hNum == 0 {
return subTableName
}
return fmt.Sprintf("%s_%d", subTableName, hnum)
return fmt.Sprintf("%s_%d", subTableName, hNum)
}
func getSuperTableName(stname string) string {
return SUPERTABLE_PREFIX + stname
func getSuperTableName(stName string) string {
return SuperTablePrefix + stName
}
/**
@ -499,7 +497,7 @@ func normalizationData(fileRows dataRows, minTime int64) int64 {
row[fileRows.config.Timestamp] = getPrimaryKey(row[fileRows.config.Timestamp]) - minTime
subTableName := getSubTableName(tableValue, fileRows.config.Stname)
subTableName := getSubTableName(tableValue, fileRows.config.StName)
value, ok := subTableMap[subTableName]
if !ok {
@ -527,7 +525,7 @@ func normalizationDataWithSameInterval(fileRows dataRows, avgInterval int64) int
continue
}
subTableName := getSubTableName(tableValue, fileRows.config.Stname)
subTableName := getSubTableName(tableValue, fileRows.config.StName)
value, ok := currSubTableMap[subTableName]
if !ok {
@ -543,7 +541,7 @@ func normalizationDataWithSameInterval(fileRows dataRows, avgInterval int64) int
}
var maxRows, tableRows int = 0, 0
var maxRows, tableRows = 0, 0
for tableName := range currSubTableMap {
tableRows = len(currSubTableMap[tableName].rows)
subTableMap[tableName] = currSubTableMap[tableName] // add to global subTableMap
@ -556,7 +554,7 @@ func normalizationDataWithSameInterval(fileRows dataRows, avgInterval int64) int
}
func getSubTableName(subTableValue string, superTableName string) string {
return SUBTABLE_PREFIX + subTableValue + "_" + superTableName
return SubTablePrefix + subTableValue + "_" + superTableName
}
func insertData(threadIndex, start, end int, wg *sync.WaitGroup, successRows []int64) {
@ -564,25 +562,25 @@ func insertData(threadIndex, start, end int, wg *sync.WaitGroup, successRows []i
defer connection.Close()
defer wg.Done()
connection.Exec("use " + db) // use db
_, _ = connection.Exec("use " + db) // use db
log.Printf("thread-%d start insert into [%d, %d) subtables.\n", threadIndex, start, end)
num := 0
subTables := scaleTableNames[start:end]
var buffer bytes.Buffer
for {
var currSuccessRows int64
var appendRows int
var lastTableName string
buffers := bytes.Buffer{}
buffers.WriteString(INSERT_PREFIX)
buffer.WriteString(InsertPrefix)
for _, tableName := range subTables {
subTableInfo := subTableMap[scaleTableMap[tableName].subTableName]
subTableRows := int64(len(subTableInfo.rows))
superTableConf := superTableConfigMap[subTableInfo.config.Stname]
superTableConf := superTableConfigMap[subTableInfo.config.StName]
tableStartTime := superTableConf.startTime
var tableEndTime int64
@ -605,40 +603,35 @@ func insertData(threadIndex, start, end int, wg *sync.WaitGroup, successRows []i
// append
if lastTableName != tableName {
buffers.WriteString(tableName)
buffers.WriteString(" values")
buffer.WriteString(tableName)
buffer.WriteString(" values")
}
lastTableName = tableName
buffers.WriteString("(")
buffers.WriteString(fmt.Sprintf("%v", currentTime))
buffers.WriteString(",")
buffer.WriteString("(")
buffer.WriteString(fmt.Sprintf("%v", currentTime))
buffer.WriteString(",")
// fieldNum := len(subTableInfo.config.Fields)
for _, field := range subTableInfo.config.Fields {
buffers.WriteString(getFieldValue(currentRow[strings.ToLower(field.Name)]))
buffers.WriteString(",")
// if( i != fieldNum -1){
// }
buffer.WriteString(getFieldValue(currentRow[strings.ToLower(field.Name)]))
buffer.WriteString(",")
}
buffers.Truncate(buffers.Len() - 1)
buffers.WriteString(") ")
buffer.Truncate(buffer.Len() - 1)
buffer.WriteString(") ")
appendRows++
insertRows++
if appendRows == batch {
// executebatch
insertSql := buffers.String()
connection.Exec("use " + db)
// executeBatch
insertSql := buffer.String()
affectedRows := executeBatchInsert(insertSql, connection)
successRows[threadIndex] += affectedRows
currSuccessRows += affectedRows
buffers.Reset()
buffers.WriteString(INSERT_PREFIX)
buffer.Reset()
buffer.WriteString(InsertPrefix)
lastTableName = ""
appendRows = 0
}
@ -654,15 +647,14 @@ func insertData(threadIndex, start, end int, wg *sync.WaitGroup, successRows []i
// left := len(rows)
if appendRows > 0 {
// executebatch
insertSql := buffers.String()
connection.Exec("use " + db)
// executeBatch
insertSql := buffer.String()
affectedRows := executeBatchInsert(insertSql, connection)
successRows[threadIndex] += affectedRows
currSuccessRows += affectedRows
buffers.Reset()
buffer.Reset()
}
// log.Printf("thread-%d finished insert %d rows, used %d ms.", threadIndex, currSuccessRows, time.Since(threadStartTime)/1e6)
@ -688,65 +680,10 @@ func insertData(threadIndex, start, end int, wg *sync.WaitGroup, successRows []i
}
func buildSql(rows []tableRows) string {
var lastTableName string
buffers := bytes.Buffer{}
for i, row := range rows {
if i == 0 {
lastTableName = row.tableName
buffers.WriteString(INSERT_PREFIX)
buffers.WriteString(row.tableName)
buffers.WriteString(" values")
buffers.WriteString(row.value)
continue
}
if lastTableName == row.tableName {
buffers.WriteString(row.value)
} else {
buffers.WriteString(" ")
buffers.WriteString(row.tableName)
buffers.WriteString(" values")
buffers.WriteString(row.value)
lastTableName = row.tableName
}
}
inserSql := buffers.String()
return inserSql
}
func buildRow(tableName string, currentTime int64, subTableInfo *dataRows, currentRow map[string]interface{}) tableRows {
tableRows := tableRows{tableName: tableName}
buffers := bytes.Buffer{}
buffers.WriteString("(")
buffers.WriteString(fmt.Sprintf("%v", currentTime))
buffers.WriteString(",")
for _, field := range subTableInfo.config.Fields {
buffers.WriteString(getFieldValue(currentRow[strings.ToLower(field.Name)]))
buffers.WriteString(",")
}
buffers.Truncate(buffers.Len() - 1)
buffers.WriteString(")")
insertSql := buffers.String()
tableRows.value = insertSql
return tableRows
}
func executeBatchInsert(insertSql string, connection *sql.DB) int64 {
result, error := connection.Exec(insertSql)
if error != nil {
log.Printf("execute insertSql %s error, %s\n", insertSql, error)
result, err := connection.Exec(insertSql)
if err != nil {
log.Printf("execute insertSql %s error, %s\n", insertSql, err)
return 0
}
affected, _ := result.RowsAffected()
@ -754,7 +691,6 @@ func executeBatchInsert(insertSql string, connection *sql.DB) int64 {
affected = 0
}
return affected
// return 0
}
func getFieldValue(fieldValue interface{}) string {
@ -762,7 +698,7 @@ func getFieldValue(fieldValue interface{}) string {
}
func getConnection() *sql.DB {
db, err := sql.Open(DRIVER_NAME, dataSourceName)
db, err := sql.Open(DriverName, dataSourceName)
if err != nil {
panic(err)
}
@ -773,19 +709,11 @@ func getSubTableNameValue(suffix interface{}) string {
return fmt.Sprintf("%v", suffix)
}
func hash(s string) int {
v := int(crc32.ChecksumIEEE([]byte(s)))
if v < 0 {
return -v
}
return v
}
func readFile(config dataimport.CaseConfig) dataRows {
func readFile(config dataImport.CaseConfig) dataRows {
fileFormat := strings.ToLower(config.Format)
if fileFormat == JSON_FORMAT {
if fileFormat == JsonFormat {
return readJSONFile(config)
} else if fileFormat == CSV_FORMAT {
} else if fileFormat == CsvFormat {
return readCSVFile(config)
}
@ -793,7 +721,7 @@ func readFile(config dataimport.CaseConfig) dataRows {
return dataRows{}
}
func readCSVFile(config dataimport.CaseConfig) dataRows {
func readCSVFile(config dataImport.CaseConfig) dataRows {
var rows dataRows
f, err := os.Open(config.FilePath)
if err != nil {
@ -813,7 +741,7 @@ func readCSVFile(config dataimport.CaseConfig) dataRows {
line := strings.ToLower(string(lineBytes))
titles := strings.Split(line, config.Separator)
if len(titles) < 3 {
// need suffix、 primarykey and at least one other field
// need suffix、 primaryKey and at least one other field
log.Printf("the first line of file %s should be title row, and at least 3 field.\n", config.FilePath)
return rows
}
@ -848,7 +776,7 @@ func readCSVFile(config dataimport.CaseConfig) dataRows {
}
// if the primary key valid
primaryKeyValue := getPrimaryKeyMillisec(config.Timestamp, config.TimestampType, config.TimestampTypeFormat, dataMap)
primaryKeyValue := getPrimaryKeyMilliSec(config.Timestamp, config.TimestampType, config.TimestampTypeFormat, dataMap)
if primaryKeyValue == -1 {
log.Printf("the Timestamp[%s] of line %d is not valid, will filtered.\n", config.Timestamp, lineNum)
continue
@ -861,7 +789,7 @@ func readCSVFile(config dataimport.CaseConfig) dataRows {
return rows
}
func readJSONFile(config dataimport.CaseConfig) dataRows {
func readJSONFile(config dataImport.CaseConfig) dataRows {
var rows dataRows
f, err := os.Open(config.FilePath)
@ -899,7 +827,7 @@ func readJSONFile(config dataimport.CaseConfig) dataRows {
continue
}
primaryKeyValue := getPrimaryKeyMillisec(config.Timestamp, config.TimestampType, config.TimestampTypeFormat, line)
primaryKeyValue := getPrimaryKeyMilliSec(config.Timestamp, config.TimestampType, config.TimestampTypeFormat, line)
if primaryKeyValue == -1 {
log.Printf("the Timestamp[%s] of line %d is not valid, will filtered.\n", config.Timestamp, lineNum)
continue
@ -916,7 +844,7 @@ func readJSONFile(config dataimport.CaseConfig) dataRows {
/**
* get primary key as millisecond , otherwise return -1
*/
func getPrimaryKeyMillisec(key string, valueType string, valueFormat string, line map[string]interface{}) int64 {
func getPrimaryKeyMilliSec(key string, valueType string, valueFormat string, line map[string]interface{}) int64 {
if !existMapKeyAndNotEmpty(key, line) {
return -1
}
@ -971,13 +899,13 @@ func existMapKeyAndNotEmpty(key string, maps map[string]interface{}) bool {
return true
}
func checkUserCaseConfig(caseName string, caseConfig *dataimport.CaseConfig) {
func checkUserCaseConfig(caseName string, caseConfig *dataImport.CaseConfig) {
if len(caseConfig.Stname) == 0 {
if len(caseConfig.StName) == 0 {
log.Fatalf("the stname of case %s can't be empty\n", caseName)
}
caseConfig.Stname = strings.ToLower(caseConfig.Stname)
caseConfig.StName = strings.ToLower(caseConfig.StName)
if len(caseConfig.Tags) == 0 {
log.Fatalf("the tags of case %s can't be empty\n", caseName)
@ -1029,24 +957,24 @@ func checkUserCaseConfig(caseName string, caseConfig *dataimport.CaseConfig) {
}
func parseArg() {
flag.StringVar(&cfg, "cfg", "config/cfg.toml", "configuration file which describes usecase and data format.")
flag.StringVar(&cases, "cases", "sensor_info", "usecase for dataset to be imported. Multiple choices can be separated by comma, for example, -cases sensor_info,camera_detection.")
flag.StringVar(&cfg, "cfg", "config/cfg.toml", "configuration file which describes useCase and data format.")
flag.StringVar(&cases, "cases", "sensor_info", "useCase for dataset to be imported. Multiple choices can be separated by comma, for example, -cases sensor_info,camera_detection.")
flag.IntVar(&hnum, "hnum", 100, "magnification factor of the sample tables. For example, if hnum is 100 and in the sample data there are 10 tables, then 10x100=1000 tables will be created in the database.")
flag.IntVar(&vnum, "vnum", 1000, "copies of the sample records in each table. If set to 0this program will never stop simulating and importing data even if the timestamp has passed current time.")
flag.Int64Var(&delay, "delay", DEFAULT_DELAY, "the delay time interval(millisecond) to continue generating data when vnum set 0.")
flag.Int64Var(&delay, "delay", DefaultDelay, "the delay time interval(millisecond) to continue generating data when vnum set 0.")
flag.Int64Var(&tick, "tick", 2000, "the tick time interval(millisecond) to print statistic info.")
flag.IntVar(&save, "save", 0, "whether to save the statistical info into 'statistic' table. 0 is disabled and 1 is enabled.")
flag.StringVar(&saveTable, "savetb", DEFAULT_STATISTIC_TABLE, "the table to save 'statistic' info when save set 1.")
flag.StringVar(&saveTable, "savetb", DefaultStatisticTable, "the table to save 'statistic' info when save set 1.")
flag.IntVar(&thread, "thread", 10, "number of threads to import data.")
flag.IntVar(&batch, "batch", 100, "rows of records in one import batch.")
flag.IntVar(&auto, "auto", 0, "whether to use the starttime and interval specified by users when simulating the data. 0 is disabled and 1 is enabled.")
flag.StringVar(&starttimestr, "start", "", "the starting timestamp of simulated data, in the format of yyyy-MM-dd HH:mm:ss.SSS. If not specified, the ealiest timestamp in the sample data will be set as the starttime.")
flag.Int64Var(&interval, "interval", DEFAULT_INTERVAL, "time inteval between two consecutive records, in the unit of millisecond. Only valid when auto is 1.")
flag.IntVar(&auto, "auto", 0, "whether to use the startTime and interval specified by users when simulating the data. 0 is disabled and 1 is enabled.")
flag.StringVar(&startTimeStr, "start", "", "the starting timestamp of simulated data, in the format of yyyy-MM-dd HH:mm:ss.SSS. If not specified, the earliest timestamp in the sample data will be set as the startTime.")
flag.Int64Var(&interval, "interval", DefaultInterval, "time interval between two consecutive records, in the unit of millisecond. Only valid when auto is 1.")
flag.StringVar(&host, "host", "127.0.0.1", "tdengine server ip.")
flag.IntVar(&port, "port", 6030, "tdengine server port.")
flag.StringVar(&user, "user", "root", "user name to login into the database.")
flag.StringVar(&password, "password", "taosdata", "the import tdengine user password")
flag.IntVar(&dropdb, "dropdb", 0, "whether to drop the existing datbase. 1 is yes and 0 otherwise.")
flag.IntVar(&dropdb, "dropdb", 0, "whether to drop the existing database. 1 is yes and 0 otherwise.")
flag.StringVar(&db, "db", "", "name of the database to store data.")
flag.StringVar(&dbparam, "dbparam", "", "database configurations when it is created.")
@ -1066,7 +994,7 @@ func printArg() {
fmt.Println("-thread:", thread)
fmt.Println("-batch:", batch)
fmt.Println("-auto:", auto)
fmt.Println("-start:", starttimestr)
fmt.Println("-start:", startTimeStr)
fmt.Println("-interval:", interval)
fmt.Println("-host:", host)
fmt.Println("-port", port)

View File

@ -899,103 +899,103 @@ devid,location,color,devgroup,ts,temperature,humidity
8, haerbing, yellow, 2, 1575129697000, 31, 16.321497
8, haerbing, yellow, 2, 1575129698000, 25, 15.864515
8, haerbing, yellow, 2, 1575129699000, 25, 16.492443
9, sijiazhuang, blue, 0, 1575129600000, 23, 16.002889
9, sijiazhuang, blue, 0, 1575129601000, 26, 17.034610
9, sijiazhuang, blue, 0, 1575129602000, 29, 12.892319
9, sijiazhuang, blue, 0, 1575129603000, 34, 15.321807
9, sijiazhuang, blue, 0, 1575129604000, 29, 12.562642
9, sijiazhuang, blue, 0, 1575129605000, 32, 17.190246
9, sijiazhuang, blue, 0, 1575129606000, 19, 15.361774
9, sijiazhuang, blue, 0, 1575129607000, 26, 15.022364
9, sijiazhuang, blue, 0, 1575129608000, 31, 14.837084
9, sijiazhuang, blue, 0, 1575129609000, 25, 11.554289
9, sijiazhuang, blue, 0, 1575129610000, 21, 15.313973
9, sijiazhuang, blue, 0, 1575129611000, 27, 18.621783
9, sijiazhuang, blue, 0, 1575129612000, 31, 18.018101
9, sijiazhuang, blue, 0, 1575129613000, 23, 14.421450
9, sijiazhuang, blue, 0, 1575129614000, 28, 10.833142
9, sijiazhuang, blue, 0, 1575129615000, 33, 18.169837
9, sijiazhuang, blue, 0, 1575129616000, 21, 18.772730
9, sijiazhuang, blue, 0, 1575129617000, 24, 18.893146
9, sijiazhuang, blue, 0, 1575129618000, 24, 10.290187
9, sijiazhuang, blue, 0, 1575129619000, 23, 17.393345
9, sijiazhuang, blue, 0, 1575129620000, 30, 12.949215
9, sijiazhuang, blue, 0, 1575129621000, 19, 19.267621
9, sijiazhuang, blue, 0, 1575129622000, 33, 14.831735
9, sijiazhuang, blue, 0, 1575129623000, 21, 14.711125
9, sijiazhuang, blue, 0, 1575129624000, 16, 17.168485
9, sijiazhuang, blue, 0, 1575129625000, 17, 16.426433
9, sijiazhuang, blue, 0, 1575129626000, 19, 13.879050
9, sijiazhuang, blue, 0, 1575129627000, 21, 18.308168
9, sijiazhuang, blue, 0, 1575129628000, 17, 10.845681
9, sijiazhuang, blue, 0, 1575129629000, 20, 10.238272
9, sijiazhuang, blue, 0, 1575129630000, 19, 19.424976
9, sijiazhuang, blue, 0, 1575129631000, 31, 13.885909
9, sijiazhuang, blue, 0, 1575129632000, 15, 19.264740
9, sijiazhuang, blue, 0, 1575129633000, 30, 12.460645
9, sijiazhuang, blue, 0, 1575129634000, 27, 17.608036
9, sijiazhuang, blue, 0, 1575129635000, 25, 13.493812
9, sijiazhuang, blue, 0, 1575129636000, 19, 10.955939
9, sijiazhuang, blue, 0, 1575129637000, 24, 11.956587
9, sijiazhuang, blue, 0, 1575129638000, 15, 19.141381
9, sijiazhuang, blue, 0, 1575129639000, 24, 14.801530
9, sijiazhuang, blue, 0, 1575129640000, 17, 14.347318
9, sijiazhuang, blue, 0, 1575129641000, 29, 14.803237
9, sijiazhuang, blue, 0, 1575129642000, 28, 10.342297
9, sijiazhuang, blue, 0, 1575129643000, 29, 19.368282
9, sijiazhuang, blue, 0, 1575129644000, 31, 17.491654
9, sijiazhuang, blue, 0, 1575129645000, 18, 13.161736
9, sijiazhuang, blue, 0, 1575129646000, 17, 16.067354
9, sijiazhuang, blue, 0, 1575129647000, 18, 13.736465
9, sijiazhuang, blue, 0, 1575129648000, 23, 19.103276
9, sijiazhuang, blue, 0, 1575129649000, 29, 16.075892
9, sijiazhuang, blue, 0, 1575129650000, 21, 10.728566
9, sijiazhuang, blue, 0, 1575129651000, 15, 18.921849
9, sijiazhuang, blue, 0, 1575129652000, 24, 16.914709
9, sijiazhuang, blue, 0, 1575129653000, 19, 13.501651
9, sijiazhuang, blue, 0, 1575129654000, 19, 13.538347
9, sijiazhuang, blue, 0, 1575129655000, 16, 13.261095
9, sijiazhuang, blue, 0, 1575129656000, 32, 16.315746
9, sijiazhuang, blue, 0, 1575129657000, 27, 16.400939
9, sijiazhuang, blue, 0, 1575129658000, 24, 13.321819
9, sijiazhuang, blue, 0, 1575129659000, 27, 19.070181
9, sijiazhuang, blue, 0, 1575129660000, 27, 13.040922
9, sijiazhuang, blue, 0, 1575129661000, 32, 10.872530
9, sijiazhuang, blue, 0, 1575129662000, 28, 16.428657
9, sijiazhuang, blue, 0, 1575129663000, 32, 13.883854
9, sijiazhuang, blue, 0, 1575129664000, 33, 14.299554
9, sijiazhuang, blue, 0, 1575129665000, 30, 16.445130
9, sijiazhuang, blue, 0, 1575129666000, 15, 18.059404
9, sijiazhuang, blue, 0, 1575129667000, 21, 12.348847
9, sijiazhuang, blue, 0, 1575129668000, 32, 13.315378
9, sijiazhuang, blue, 0, 1575129669000, 17, 15.689507
9, sijiazhuang, blue, 0, 1575129670000, 22, 15.591808
9, sijiazhuang, blue, 0, 1575129671000, 27, 16.386065
9, sijiazhuang, blue, 0, 1575129672000, 25, 10.564803
9, sijiazhuang, blue, 0, 1575129673000, 20, 12.276544
9, sijiazhuang, blue, 0, 1575129674000, 26, 15.828786
9, sijiazhuang, blue, 0, 1575129675000, 18, 12.236420
9, sijiazhuang, blue, 0, 1575129676000, 15, 19.439522
9, sijiazhuang, blue, 0, 1575129677000, 19, 19.831531
9, sijiazhuang, blue, 0, 1575129678000, 22, 17.115744
9, sijiazhuang, blue, 0, 1575129679000, 29, 19.879456
9, sijiazhuang, blue, 0, 1575129680000, 34, 10.207136
9, sijiazhuang, blue, 0, 1575129681000, 16, 17.633523
9, sijiazhuang, blue, 0, 1575129682000, 15, 14.227873
9, sijiazhuang, blue, 0, 1575129683000, 34, 12.027768
9, sijiazhuang, blue, 0, 1575129684000, 22, 11.376610
9, sijiazhuang, blue, 0, 1575129685000, 21, 11.711299
9, sijiazhuang, blue, 0, 1575129686000, 33, 14.281126
9, sijiazhuang, blue, 0, 1575129687000, 31, 10.895302
9, sijiazhuang, blue, 0, 1575129688000, 31, 13.971350
9, sijiazhuang, blue, 0, 1575129689000, 15, 15.262790
9, sijiazhuang, blue, 0, 1575129690000, 23, 12.440568
9, sijiazhuang, blue, 0, 1575129691000, 32, 19.731267
9, sijiazhuang, blue, 0, 1575129692000, 22, 10.518092
9, sijiazhuang, blue, 0, 1575129693000, 34, 17.863021
9, sijiazhuang, blue, 0, 1575129694000, 28, 11.478909
9, sijiazhuang, blue, 0, 1575129695000, 16, 15.075524
9, sijiazhuang, blue, 0, 1575129696000, 16, 10.292127
9, sijiazhuang, blue, 0, 1575129697000, 22, 13.716012
9, sijiazhuang, blue, 0, 1575129698000, 32, 10.906551
9, sijiazhuang, blue, 0, 1575129699000, 19, 18.386868
9, shijiazhuang, blue, 0, 1575129600000, 23, 16.002889
9, shijiazhuang, blue, 0, 1575129601000, 26, 17.034610
9, shijiazhuang, blue, 0, 1575129602000, 29, 12.892319
9, shijiazhuang, blue, 0, 1575129603000, 34, 15.321807
9, shijiazhuang, blue, 0, 1575129604000, 29, 12.562642
9, shijiazhuang, blue, 0, 1575129605000, 32, 17.190246
9, shijiazhuang, blue, 0, 1575129606000, 19, 15.361774
9, shijiazhuang, blue, 0, 1575129607000, 26, 15.022364
9, shijiazhuang, blue, 0, 1575129608000, 31, 14.837084
9, shijiazhuang, blue, 0, 1575129609000, 25, 11.554289
9, shijiazhuang, blue, 0, 1575129610000, 21, 15.313973
9, shijiazhuang, blue, 0, 1575129611000, 27, 18.621783
9, shijiazhuang, blue, 0, 1575129612000, 31, 18.018101
9, shijiazhuang, blue, 0, 1575129613000, 23, 14.421450
9, shijiazhuang, blue, 0, 1575129614000, 28, 10.833142
9, shijiazhuang, blue, 0, 1575129615000, 33, 18.169837
9, shijiazhuang, blue, 0, 1575129616000, 21, 18.772730
9, shijiazhuang, blue, 0, 1575129617000, 24, 18.893146
9, shijiazhuang, blue, 0, 1575129618000, 24, 10.290187
9, shijiazhuang, blue, 0, 1575129619000, 23, 17.393345
9, shijiazhuang, blue, 0, 1575129620000, 30, 12.949215
9, shijiazhuang, blue, 0, 1575129621000, 19, 19.267621
9, shijiazhuang, blue, 0, 1575129622000, 33, 14.831735
9, shijiazhuang, blue, 0, 1575129623000, 21, 14.711125
9, shijiazhuang, blue, 0, 1575129624000, 16, 17.168485
9, shijiazhuang, blue, 0, 1575129625000, 17, 16.426433
9, shijiazhuang, blue, 0, 1575129626000, 19, 13.879050
9, shijiazhuang, blue, 0, 1575129627000, 21, 18.308168
9, shijiazhuang, blue, 0, 1575129628000, 17, 10.845681
9, shijiazhuang, blue, 0, 1575129629000, 20, 10.238272
9, shijiazhuang, blue, 0, 1575129630000, 19, 19.424976
9, shijiazhuang, blue, 0, 1575129631000, 31, 13.885909
9, shijiazhuang, blue, 0, 1575129632000, 15, 19.264740
9, shijiazhuang, blue, 0, 1575129633000, 30, 12.460645
9, shijiazhuang, blue, 0, 1575129634000, 27, 17.608036
9, shijiazhuang, blue, 0, 1575129635000, 25, 13.493812
9, shijiazhuang, blue, 0, 1575129636000, 19, 10.955939
9, shijiazhuang, blue, 0, 1575129637000, 24, 11.956587
9, shijiazhuang, blue, 0, 1575129638000, 15, 19.141381
9, shijiazhuang, blue, 0, 1575129639000, 24, 14.801530
9, shijiazhuang, blue, 0, 1575129640000, 17, 14.347318
9, shijiazhuang, blue, 0, 1575129641000, 29, 14.803237
9, shijiazhuang, blue, 0, 1575129642000, 28, 10.342297
9, shijiazhuang, blue, 0, 1575129643000, 29, 19.368282
9, shijiazhuang, blue, 0, 1575129644000, 31, 17.491654
9, shijiazhuang, blue, 0, 1575129645000, 18, 13.161736
9, shijiazhuang, blue, 0, 1575129646000, 17, 16.067354
9, shijiazhuang, blue, 0, 1575129647000, 18, 13.736465
9, shijiazhuang, blue, 0, 1575129648000, 23, 19.103276
9, shijiazhuang, blue, 0, 1575129649000, 29, 16.075892
9, shijiazhuang, blue, 0, 1575129650000, 21, 10.728566
9, shijiazhuang, blue, 0, 1575129651000, 15, 18.921849
9, shijiazhuang, blue, 0, 1575129652000, 24, 16.914709
9, shijiazhuang, blue, 0, 1575129653000, 19, 13.501651
9, shijiazhuang, blue, 0, 1575129654000, 19, 13.538347
9, shijiazhuang, blue, 0, 1575129655000, 16, 13.261095
9, shijiazhuang, blue, 0, 1575129656000, 32, 16.315746
9, shijiazhuang, blue, 0, 1575129657000, 27, 16.400939
9, shijiazhuang, blue, 0, 1575129658000, 24, 13.321819
9, shijiazhuang, blue, 0, 1575129659000, 27, 19.070181
9, shijiazhuang, blue, 0, 1575129660000, 27, 13.040922
9, shijiazhuang, blue, 0, 1575129661000, 32, 10.872530
9, shijiazhuang, blue, 0, 1575129662000, 28, 16.428657
9, shijiazhuang, blue, 0, 1575129663000, 32, 13.883854
9, shijiazhuang, blue, 0, 1575129664000, 33, 14.299554
9, shijiazhuang, blue, 0, 1575129665000, 30, 16.445130
9, shijiazhuang, blue, 0, 1575129666000, 15, 18.059404
9, shijiazhuang, blue, 0, 1575129667000, 21, 12.348847
9, shijiazhuang, blue, 0, 1575129668000, 32, 13.315378
9, shijiazhuang, blue, 0, 1575129669000, 17, 15.689507
9, shijiazhuang, blue, 0, 1575129670000, 22, 15.591808
9, shijiazhuang, blue, 0, 1575129671000, 27, 16.386065
9, shijiazhuang, blue, 0, 1575129672000, 25, 10.564803
9, shijiazhuang, blue, 0, 1575129673000, 20, 12.276544
9, shijiazhuang, blue, 0, 1575129674000, 26, 15.828786
9, shijiazhuang, blue, 0, 1575129675000, 18, 12.236420
9, shijiazhuang, blue, 0, 1575129676000, 15, 19.439522
9, shijiazhuang, blue, 0, 1575129677000, 19, 19.831531
9, shijiazhuang, blue, 0, 1575129678000, 22, 17.115744
9, shijiazhuang, blue, 0, 1575129679000, 29, 19.879456
9, shijiazhuang, blue, 0, 1575129680000, 34, 10.207136
9, shijiazhuang, blue, 0, 1575129681000, 16, 17.633523
9, shijiazhuang, blue, 0, 1575129682000, 15, 14.227873
9, shijiazhuang, blue, 0, 1575129683000, 34, 12.027768
9, shijiazhuang, blue, 0, 1575129684000, 22, 11.376610
9, shijiazhuang, blue, 0, 1575129685000, 21, 11.711299
9, shijiazhuang, blue, 0, 1575129686000, 33, 14.281126
9, shijiazhuang, blue, 0, 1575129687000, 31, 10.895302
9, shijiazhuang, blue, 0, 1575129688000, 31, 13.971350
9, shijiazhuang, blue, 0, 1575129689000, 15, 15.262790
9, shijiazhuang, blue, 0, 1575129690000, 23, 12.440568
9, shijiazhuang, blue, 0, 1575129691000, 32, 19.731267
9, shijiazhuang, blue, 0, 1575129692000, 22, 10.518092
9, shijiazhuang, blue, 0, 1575129693000, 34, 17.863021
9, shijiazhuang, blue, 0, 1575129694000, 28, 11.478909
9, shijiazhuang, blue, 0, 1575129695000, 16, 15.075524
9, shijiazhuang, blue, 0, 1575129696000, 16, 10.292127
9, shijiazhuang, blue, 0, 1575129697000, 22, 13.716012
9, shijiazhuang, blue, 0, 1575129698000, 32, 10.906551
9, shijiazhuang, blue, 0, 1575129699000, 19, 18.386868

importSampleData/go.mod Normal file
View File

@ -0,0 +1,8 @@
module github.com/taosdata/TDengine/importSampleData
go 1.13
require (
github.com/pelletier/go-toml v1.9.0 // indirect
github.com/taosdata/driver-go v0.0.0-20210415143420-d99751356e28 // indirect
)
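
With the module file in place, dependencies can be fetched and the tool built from the importSampleData directory; a rough sketch, reusing the build command documented in the README:

cd importSampleData
go mod download
go build -o bin/taosimport app/main.go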

View File

@ -14,23 +14,23 @@ var (
once sync.Once
)
// Config inclue all scene import config
// Config include all scene import config
type Config struct {
UserCases map[string]CaseConfig
}
// CaseConfig include the sample data config and tdengine config
type CaseConfig struct {
Format string
FilePath string
Separator string
Stname string
SubTableName string
Timestamp string
TimestampType string
TimestampTypeFormat string
Tags []FieldInfo
Fields []FieldInfo
Format string
FilePath string
Separator string
StName string
SubTableName string
Timestamp string
TimestampType string
TimestampTypeFormat string
Tags []FieldInfo
Fields []FieldInfo
}
// FieldInfo is field or tag info
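
For illustration only, a hypothetical [usecase] entry in config/cfg.toml might map onto CaseConfig roughly as follows (key names and layout are assumptions, not copied from the repository's actual sample config):

[usecase.sensor_info]
format = "csv"
filepath = "data/sensor_info.csv"
separator = ","
stname = "sensor_info"
subtablename = "devid"
timestamp = "ts"
timestamptype = "millisecond"
# tags and fields would list the tag/field columns as FieldInfo entries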