diff --git a/source/libs/index/src/indexFstDfa.c b/source/libs/index/src/indexFstDfa.c
index 046ed0f4f4..3a36010b42 100644
--- a/source/libs/index/src/indexFstDfa.c
+++ b/source/libs/index/src/indexFstDfa.c
@@ -19,8 +19,8 @@ const static uint32_t STATE_LIMIT = 1000;
 
 static int dfaInstsEqual(const void *a, const void *b, size_t size) {
-  SArray *ar = (SArray *)a;
-  SArray *br = (SArray *)b;
+  SArray *ar = *(SArray **)a;
+  SArray *br = *(SArray **)b;
   size_t  al = ar != NULL ? taosArrayGetSize(ar) : 0;
   size_t  bl = br != NULL ? taosArrayGetSize(br) : 0;
   if (al != bl) {
@@ -71,8 +71,8 @@ FstDfa *dfaBuilderBuild(FstDfaBuilder *builder) {
   dfaAdd(builder->dfa, cur, 0);
 
-  SArray  *states = taosArrayInit(0, sizeof(uint32_t));
   uint32_t result;
+  SArray  *states = taosArrayInit(0, sizeof(uint32_t));
   if (dfaBuilderCacheState(builder, cur, &result)) {
     taosArrayPush(states, &result);
   }
@@ -146,10 +146,9 @@ bool dfaBuilderCacheState(FstDfaBuilder *builder, FstSparseSet *set, uint32_t *r
     *result = *v;
     taosArrayDestroy(tinsts);
   } else {
-    DfaState st;
-    st.insts = tinsts;
-    st.isMatch = isMatch;
+    DfaState st = {.insts = tinsts, .isMatch = isMatch};
     taosArrayPush(builder->dfa->states, &st);
+
     int32_t sz = taosArrayGetSize(builder->dfa->states) - 1;
     taosHashPut(builder->cache, &tinsts, sizeof(POINTER_BYTES), &sz, sizeof(sz));
     *result = sz;
diff --git a/source/libs/index/src/indexFstFile.c b/source/libs/index/src/indexFstFile.c
index 6036a06eaa..4f278c7af6 100644
--- a/source/libs/index/src/indexFstFile.c
+++ b/source/libs/index/src/indexFstFile.c
@@ -85,11 +85,12 @@ static int idxFileCtxDoReadFrom(IFileCtx* ctx, uint8_t* buf, int len, int32_t of
       blk->blockId = blkId;
       blk->nread = taosPReadFile(ctx->file.pFile, blk->buf, kBlockSize, blkId * kBlockSize);
       assert(blk->nread <= kBlockSize);
-      nread = TMIN(blkLeft, len);
 
       if (blk->nread < kBlockSize && blk->nread < len) {
        break;
       }
+
+      nread = TMIN(blkLeft, len);
       memcpy(buf + total, blk->buf + blkOffset, nread);
 
       LRUStatus s = taosLRUCacheInsert(ctx->lru, key, strlen(key), blk, cacheMemSize, deleteDataBlockFromLRU, NULL,
diff --git a/source/libs/index/src/indexFstSparse.c b/source/libs/index/src/indexFstSparse.c
index 60eb7afd90..ebc0cb3637 100644
--- a/source/libs/index/src/indexFstSparse.c
+++ b/source/libs/index/src/indexFstSparse.c
@@ -78,8 +78,8 @@ bool sparSetContains(FstSparseSet *ss, int32_t ip) {
   if (ip >= ss->cap || ip < 0) {
     return false;
   }
-  int32_t i = ss->sparse[ip];
 
+  int32_t i = ss->sparse[ip];
   if (i >= 0 && i < ss->cap && i < ss->size && ss->dense[i] == ip) {
     return true;
   } else {
diff --git a/tests/system-test/2-query/json_tag_large_tables.py b/tests/system-test/2-query/json_tag_large_tables.py
index fc41858580..5d7df6ceb8 100644
--- a/tests/system-test/2-query/json_tag_large_tables.py
+++ b/tests/system-test/2-query/json_tag_large_tables.py
@@ -43,13 +43,48 @@ class TDTestCase:
         tdSql.execute('create database db vgroups 1')
         tdSql.execute('use db')
         print("============== STEP 1 ===== prepare data & validate json string")
+
+        i = 0
+        # add 100000 tables
+        tdSql.execute("create table if not exists jsons1(ts timestamp, dataInt int, dataBool bool, dataStr nchar(50), dataStrBin binary(150)) tags(jtag json)")
+        while i <= 100000:
+            sql = """insert into jsons1_%d using jsons1 tags('{"tag1":%d}') values(1591060618000, 1, false, 'json1', '你是') (1591060608000, 23, true, '等等', 'json')"""%(i, i)
+            tdSql.execute(sql)
+            i = i + 1
+
+        # do query
+        i = 0
+        while i <= 100000:
+            sql = """select count(*) from jsons1 where jtag->'tag1' = %d"""%(i)
+            tdSql.query(sql)
+            if 1 != tdSql.getRows():
+                print("err: %s"%(sql))
+            i = i + 1
+
+        while i <= 10000000:
+            sql = """insert into jsons1_%d using jsons1 tags('{"tag1":%d}') values(1591060618000, 1, false, 'json1', '你是') (1591060608000, 23, true, '等等', 'json')"""%(i, i)
+            tdSql.execute(sql)
+            i = i + 1
+
+        i = 0
+        # drop super table
+        tdSql.execute("create table if not exists jsons1(ts timestamp, dataInt int, dataBool bool, dataStr nchar(50), dataStrBin binary(150)) tags(jtag json)")
+        while i <= 100000:
+            sql = """insert into jsons1_%d using jsons1 tags('{"tag1":%d}') values(1591060618000, 1, false, 'json1', '你是') (1591060608000, 23, true, '等等', 'json')"""%(i, i)
+            tdSql.execute(sql)
+            i = i + 1
+
+        tdSql.execute('drop stable jsons1')
+
+
+        # drop database
         i = 0
         tdSql.execute("create table if not exists jsons1(ts timestamp, dataInt int, dataBool bool, dataStr nchar(50), dataStrBin binary(150)) tags(jtag json)")
         while i <= 100000:
-            f = "insert into jsons1_{} using jsons1 tags('{\"tag1\":\"fff\",\"tag2\":{}, \"tag3\":true}') values(1591060618000, 1, false, 'json1', '你是') (1591060608000, 23, true, '等等', 'json')".format
-            sql = f(i, i)
+            sql = """insert into jsons1_%d using jsons1 tags('{"tag1":%d}') values(1591060618000, 1, false, 'json1', '你是') (1591060608000, 23, true, '等等', 'json')"""%(i, i)
             tdSql.execute(sql)
             i = i + 1
+        tdSql.execute('drop database db')
+
         # test duplicate key using the first one. elimate empty key
         #tdSql.execute("CREATE TABLE if not exists jsons1_8 using jsons1 tags('{\"tag1\":null, \"tag1\":true, \"tag1\":45, \"1tag$\":2, \" \":90, \"\":32}')")
         tdSql.query("select jtag from jsons1_8")
         tdSql.checkRows(0);