merge develop
commit 15f6335e98

@@ -68,6 +68,8 @@ CMakeError.log
*.o
version.c
taos.rc
src/connector/jdbc/.classpath
src/connector/jdbc/.project
src/connector/jdbc/.settings/
tests/comparisonTest/cassandra/cassandratest/.classpath
tests/comparisonTest/cassandra/cassandratest/.project

@@ -69,6 +69,39 @@ mkdir -p ${install_dir}/inc && cp ${header_files} ${install_dir}/inc
mkdir -p ${install_dir}/cfg && cp ${cfg_dir}/taos.cfg ${install_dir}/cfg/taos.cfg
mkdir -p ${install_dir}/bin && cp ${bin_files} ${install_dir}/bin && chmod a+x ${install_dir}/bin/*

if [ -f ${build_dir}/bin/jemalloc-config ]; then
    mkdir -p ${install_dir}/jemalloc/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3}
    cp ${build_dir}/bin/jemalloc-config ${install_dir}/jemalloc/bin
    if [ -f ${build_dir}/bin/jemalloc.sh ]; then
        cp ${build_dir}/bin/jemalloc.sh ${install_dir}/jemalloc/bin
    fi
    if [ -f ${build_dir}/bin/jeprof ]; then
        cp ${build_dir}/bin/jeprof ${install_dir}/jemalloc/bin
    fi
    if [ -f ${build_dir}/include/jemalloc/jemalloc.h ]; then
        cp ${build_dir}/include/jemalloc/jemalloc.h ${install_dir}/jemalloc/include/jemalloc
    fi
    if [ -f ${build_dir}/lib/libjemalloc.so.2 ]; then
        cp ${build_dir}/lib/libjemalloc.so.2 ${install_dir}/jemalloc/lib
        ln -sf libjemalloc.so.2 ${install_dir}/jemalloc/lib/libjemalloc.so
    fi
    if [ -f ${build_dir}/lib/libjemalloc.a ]; then
        cp ${build_dir}/lib/libjemalloc.a ${install_dir}/jemalloc/lib
    fi
    if [ -f ${build_dir}/lib/libjemalloc_pic.a ]; then
        cp ${build_dir}/lib/libjemalloc_pic.a ${install_dir}/jemalloc/lib
    fi
    if [ -f ${build_dir}/lib/pkgconfig/jemalloc.pc ]; then
        cp ${build_dir}/lib/pkgconfig/jemalloc.pc ${install_dir}/jemalloc/lib/pkgconfig
    fi
    if [ -f ${build_dir}/share/doc/jemalloc/jemalloc.html ]; then
        cp ${build_dir}/share/doc/jemalloc/jemalloc.html ${install_dir}/jemalloc/share/doc/jemalloc
    fi
    if [ -f ${build_dir}/share/man/man3/jemalloc.3 ]; then
        cp ${build_dir}/share/man/man3/jemalloc.3 ${install_dir}/jemalloc/share/man/man3
    fi
fi

cd ${install_dir}

if [ "$osType" != "Darwin" ]; then

@@ -91,6 +91,39 @@ else
fi
chmod a+x ${install_dir}/bin/* || :

if [ -f ${build_dir}/bin/jemalloc-config ]; then
    mkdir -p ${install_dir}/jemalloc/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3}
    cp ${build_dir}/bin/jemalloc-config ${install_dir}/jemalloc/bin
    if [ -f ${build_dir}/bin/jemalloc.sh ]; then
        cp ${build_dir}/bin/jemalloc.sh ${install_dir}/jemalloc/bin
    fi
    if [ -f ${build_dir}/bin/jeprof ]; then
        cp ${build_dir}/bin/jeprof ${install_dir}/jemalloc/bin
    fi
    if [ -f ${build_dir}/include/jemalloc/jemalloc.h ]; then
        cp ${build_dir}/include/jemalloc/jemalloc.h ${install_dir}/jemalloc/include/jemalloc
    fi
    if [ -f ${build_dir}/lib/libjemalloc.so.2 ]; then
        cp ${build_dir}/lib/libjemalloc.so.2 ${install_dir}/jemalloc/lib
        ln -sf libjemalloc.so.2 ${install_dir}/jemalloc/lib/libjemalloc.so
    fi
    if [ -f ${build_dir}/lib/libjemalloc.a ]; then
        cp ${build_dir}/lib/libjemalloc.a ${install_dir}/jemalloc/lib
    fi
    if [ -f ${build_dir}/lib/libjemalloc_pic.a ]; then
        cp ${build_dir}/lib/libjemalloc_pic.a ${install_dir}/jemalloc/lib
    fi
    if [ -f ${build_dir}/lib/pkgconfig/jemalloc.pc ]; then
        cp ${build_dir}/lib/pkgconfig/jemalloc.pc ${install_dir}/jemalloc/lib/pkgconfig
    fi
    if [ -f ${build_dir}/share/doc/jemalloc/jemalloc.html ]; then
        cp ${build_dir}/share/doc/jemalloc/jemalloc.html ${install_dir}/jemalloc/share/doc/jemalloc
    fi
    if [ -f ${build_dir}/share/man/man3/jemalloc.3 ]; then
        cp ${build_dir}/share/man/man3/jemalloc.3 ${install_dir}/jemalloc/share/man/man3
    fi
fi

cd ${install_dir}

if [ "$osType" != "Darwin" ]; then

@@ -91,6 +91,39 @@ else
fi
chmod a+x ${install_dir}/bin/* || :

if [ -f ${build_dir}/bin/jemalloc-config ]; then
    mkdir -p ${install_dir}/jemalloc/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3}
    cp ${build_dir}/bin/jemalloc-config ${install_dir}/jemalloc/bin
    if [ -f ${build_dir}/bin/jemalloc.sh ]; then
        cp ${build_dir}/bin/jemalloc.sh ${install_dir}/jemalloc/bin
    fi
    if [ -f ${build_dir}/bin/jeprof ]; then
        cp ${build_dir}/bin/jeprof ${install_dir}/jemalloc/bin
    fi
    if [ -f ${build_dir}/include/jemalloc/jemalloc.h ]; then
        cp ${build_dir}/include/jemalloc/jemalloc.h ${install_dir}/jemalloc/include/jemalloc
    fi
    if [ -f ${build_dir}/lib/libjemalloc.so.2 ]; then
        cp ${build_dir}/lib/libjemalloc.so.2 ${install_dir}/jemalloc/lib
        ln -sf libjemalloc.so.2 ${install_dir}/jemalloc/lib/libjemalloc.so
    fi
    if [ -f ${build_dir}/lib/libjemalloc.a ]; then
        cp ${build_dir}/lib/libjemalloc.a ${install_dir}/jemalloc/lib
    fi
    if [ -f ${build_dir}/lib/libjemalloc_pic.a ]; then
        cp ${build_dir}/lib/libjemalloc_pic.a ${install_dir}/jemalloc/lib
    fi
    if [ -f ${build_dir}/lib/pkgconfig/jemalloc.pc ]; then
        cp ${build_dir}/lib/pkgconfig/jemalloc.pc ${install_dir}/jemalloc/lib/pkgconfig
    fi
    if [ -f ${build_dir}/share/doc/jemalloc/jemalloc.html ]; then
        cp ${build_dir}/share/doc/jemalloc/jemalloc.html ${install_dir}/jemalloc/share/doc/jemalloc
    fi
    if [ -f ${build_dir}/share/man/man3/jemalloc.3 ]; then
        cp ${build_dir}/share/man/man3/jemalloc.3 ${install_dir}/jemalloc/share/man/man3
    fi
fi

cd ${install_dir}

if [ "$osType" != "Darwin" ]; then

@@ -8184,7 +8184,6 @@ static STableMeta* extractTempTableMetaFromSubquery(SQueryInfo* pUpstream) {
  STableMetaInfo* pUpstreamTableMetaInfo = tscGetMetaInfo(pUpstream, 0);

  int32_t numOfColumns = pUpstream->fieldsInfo.numOfOutput;

  STableMeta *meta = calloc(1, sizeof(STableMeta) + sizeof(SSchema) * numOfColumns);
  meta->tableType = TSDB_TEMP_TABLE;

@@ -57,8 +57,8 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
                    parameterCnt++;
                }
            }
            parameters = new Object[parameterCnt];
        }
        parameters = new Object[parameterCnt];

        if (parameterCnt > 1) {
            // the table name is also a parameter, so ignore it.

@@ -22,16 +22,16 @@ public class RestfulPreparedStatement extends RestfulStatement implements Prepar
        super(conn, database);
        this.rawSql = sql;

        if (sql.contains("?")) {
        int parameterCnt = 0;
        if (sql.contains("?")) {
            for (int i = 0; i < sql.length(); i++) {
                if ('?' == sql.charAt(i)) {
                    parameterCnt++;
                }
            }
            parameters = new Object[parameterCnt];
            this.isPrepared = true;
        }
        parameters = new Object[parameterCnt];

        // build parameterMetaData
        this.parameterMetaData = new RestfulParameterMetaData(parameters);

@@ -15,6 +15,8 @@ public class RestfulPreparedStatementTest {
    private static PreparedStatement pstmt_insert;
    private static final String sql_select = "select * from t1 where ts > ? and ts <= ? and f1 >= ?";
    private static PreparedStatement pstmt_select;
    private static final String sql_without_parameters = "select count(*) from t1";
    private static PreparedStatement pstmt_without_parameters;

    @Test
    public void executeQuery() throws SQLException {

@@ -237,6 +239,7 @@ public class RestfulPreparedStatementTest {
    @Test
    public void clearParameters() throws SQLException {
        pstmt_insert.clearParameters();
        pstmt_without_parameters.clearParameters();
    }

    @Test

@@ -382,6 +385,7 @@ public class RestfulPreparedStatementTest {

            pstmt_insert = conn.prepareStatement(sql_insert);
            pstmt_select = conn.prepareStatement(sql_select);
            pstmt_without_parameters = conn.prepareStatement(sql_without_parameters);
        } catch (SQLException e) {
            e.printStackTrace();
        }

@@ -394,6 +398,8 @@ public class RestfulPreparedStatementTest {
            pstmt_insert.close();
            if (pstmt_select != null)
                pstmt_select.close();
            if (pstmt_without_parameters != null)
                pstmt_without_parameters.close();
            if (conn != null)
                conn.close();
        } catch (SQLException e) {

@@ -15,36 +15,18 @@ const { NULL_POINTER } = require('ref-napi');

module.exports = CTaosInterface;

function convertMillisecondsToDatetime(time) {
  return new TaosObjects.TaosTimestamp(time);
}
function convertMicrosecondsToDatetime(time) {
  return new TaosObjects.TaosTimestamp(time * 0.001, true);
}

function convertTimestamp(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
  timestampConverter = convertMillisecondsToDatetime;
  if (micro == true) {
    timestampConverter = convertMicrosecondsToDatetime;
  }
function convertTimestamp(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
  data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
  let res = [];
  let currOffset = 0;
  while (currOffset < data.length) {
    let queue = [];
    let time = 0;
    for (let i = currOffset; i < currOffset + nbytes; i++) {
      queue.push(data[i]);
    }
    for (let i = queue.length - 1; i >= 0; i--) {
      time += queue[i] * Math.pow(16, i * 2);
    }
    let time = data.readInt64LE(currOffset);
    currOffset += nbytes;
    res.push(timestampConverter(time));
    res.push(new TaosObjects.TaosTimestamp(time, precision));
  }
  return res;
}
function convertBool(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
function convertBool(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
  data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
  let res = new Array(data.length);
  for (let i = 0; i < data.length; i++) {

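The new convertTimestamp above replaces the byte-by-byte accumulation loop (which went through Math.pow and could not keep full 64-bit precision) with a single little-endian int64 read per row, and hands the block's precision straight to TaosTimestamp. A minimal, self-contained sketch of that decoding step, using Node's built-in Buffer API instead of the connector's ref-napi buffers (the function and variable names here are illustrative, not part of the connector):

// Sketch: decode a column of little-endian int64 timestamps, one value per 8 bytes.
const assert = require('assert');

function decodeTimestampColumn(buf, numOfRows, nbytes = 8) {
  const res = [];
  for (let row = 0; row < numOfRows; row++) {
    // readBigInt64LE keeps full 64-bit precision; accumulating bytes into a
    // plain Number (as the removed loop did) rounds values above 2^53.
    res.push(buf.readBigInt64LE(row * nbytes));
  }
  return res;
}

// Two sample rows: a millisecond-scale value and a microsecond-scale value.
const buf = Buffer.alloc(16);
buf.writeBigInt64LE(1625801548423n, 0);
buf.writeBigInt64LE(1625801548423914n, 8);
assert.deepStrictEqual(decodeTimestampColumn(buf, 2), [1625801548423n, 1625801548423914n]);

The connector itself still reads through ref-napi's readInt64LE; the sketch only shows why a direct 64-bit read is preferable to per-byte accumulation.
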
@@ -60,7 +42,7 @@ function convertBool(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
  }
  return res;
}
function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
  data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
  let res = [];
  let currOffset = 0;

@@ -71,7 +53,7 @@ function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, micro = false
  }
  return res;
}
function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
  data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
  let res = [];
  let currOffset = 0;

@@ -82,7 +64,7 @@ function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, micro = fals
  }
  return res;
}
function convertInt(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
  data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
  let res = [];
  let currOffset = 0;

@@ -93,7 +75,7 @@ function convertInt(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
  }
  return res;
}
function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
  data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
  let res = [];
  let currOffset = 0;

@@ -104,7 +86,7 @@ function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, micro = false)
  }
  return res;
}
function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
  data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
  let res = [];
  let currOffset = 0;

@@ -115,7 +97,7 @@ function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, micro = false)
  }
  return res;
}
function convertDouble(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
function convertDouble(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
  data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
  let res = [];
  let currOffset = 0;

@@ -127,7 +109,7 @@ function convertDouble(data, num_of_rows, nbytes = 0, offset = 0, micro = false)
  return res;
}

function convertNchar(data, num_of_rows, nbytes = 0, offset = 0, micro = false) {
function convertNchar(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
  data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
  let res = [];

@@ -272,7 +254,7 @@ CTaosInterface.prototype.config = function config() {
CTaosInterface.prototype.connect = function connect(host = null, user = "root", password = "taosdata", db = null, port = 0) {
  let _host, _user, _password, _db, _port;
  try {
    _host = host != null ? ref.allocCString(host) : ref.alloc(ref.types.char_ptr, ref.NULL);
    _host = host != null ? ref.allocCString(host) : ref.NULL;
  }
  catch (err) {
    throw "Attribute Error: host is expected as a str";

@@ -290,7 +272,7 @@ CTaosInterface.prototype.connect = function connect(host = null, user = "root",
    throw "Attribute Error: password is expected as a str";
  }
  try {
    _db = db != null ? ref.allocCString(db) : ref.alloc(ref.types.char_ptr, ref.NULL);
    _db = db != null ? ref.allocCString(db) : ref.NULL;
  }
  catch (err) {
    throw "Attribute Error: db is expected as a str";

@@ -345,8 +327,7 @@ CTaosInterface.prototype.fetchBlock = function fetchBlock(result, fields) {
  }

  var fieldL = this.libtaos.taos_fetch_lengths(result);

  let isMicro = (this.libtaos.taos_result_precision(result) == FieldTypes.C_TIMESTAMP_MICRO);
  let precision = this.libtaos.taos_result_precision(result);

  var fieldlens = [];

@@ -373,7 +354,7 @@ CTaosInterface.prototype.fetchBlock = function fetchBlock(result, fields) {
      if (!convertFunctions[fields[i]['type']]) {
        throw new errors.DatabaseError("Invalid data type returned from database");
      }
      blocks[i] = convertFunctions[fields[i]['type']](pdata, num_of_rows, fieldlens[i], offset, isMicro);
      blocks[i] = convertFunctions[fields[i]['type']](pdata, num_of_rows, fieldlens[i], offset, precision);
    }
  }
  return { blocks: blocks, num_of_rows }

@@ -423,7 +404,7 @@ CTaosInterface.prototype.fetch_rows_a = function fetch_rows_a(result, callback,
    let row = cti.libtaos.taos_fetch_row(result2);
    let fields = cti.fetchFields_a(result2);

    let isMicro = (cti.libtaos.taos_result_precision(result2) == FieldTypes.C_TIMESTAMP_MICRO);
    let precision = cti.libtaos.taos_result_precision(result2);
    let blocks = new Array(fields.length);
    blocks.fill(null);
    numOfRows2 = Math.abs(numOfRows2);

@@ -449,7 +430,7 @@ CTaosInterface.prototype.fetch_rows_a = function fetch_rows_a(result, callback,
        let prow = ref.reinterpret(row, 8, i * 8);
        prow = prow.readPointer();
        prow = ref.ref(prow);
        blocks[i] = convertFunctions[fields[i]['type']](prow, 1, fieldlens[i], offset, isMicro);
        blocks[i] = convertFunctions[fields[i]['type']](prow, 1, fieldlens[i], offset, precision);
        //offset += fields[i]['bytes'] * numOfRows2;
      }
    }

@@ -572,7 +553,7 @@ CTaosInterface.prototype.openStream = function openStream(connection, sql, callb
  var cti = this;
  let asyncCallbackWrapper = function (param2, result2, row) {
    let fields = cti.fetchFields_a(result2);
    let isMicro = (cti.libtaos.taos_result_precision(result2) == FieldTypes.C_TIMESTAMP_MICRO);
    let precision = cti.libtaos.taos_result_precision(result2);
    let blocks = new Array(fields.length);
    blocks.fill(null);
    let numOfRows2 = 1;

@@ -582,7 +563,7 @@ CTaosInterface.prototype.openStream = function openStream(connection, sql, callb
      if (!convertFunctions[fields[i]['type']]) {
        throw new errors.DatabaseError("Invalid data type returned from database");
      }
      blocks[i] = convertFunctions[fields[i]['type']](row, numOfRows2, fields[i]['bytes'], offset, isMicro);
      blocks[i] = convertFunctions[fields[i]['type']](row, numOfRows2, fields[i]['bytes'], offset, precision);
      offset += fields[i]['bytes'] * numOfRows2;
    }
  }

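Across the hunks above, the boolean isMicro flag is replaced by the integer precision returned by taos_result_precision and forwarded unchanged to every column converter. A rough sketch of that dispatch pattern, with assumed field-type ids and placeholder converter bodies (not the connector's actual tables):

// Hypothetical converter table keyed by field type; every converter now takes the
// same trailing `precision` argument (0 = ms, 1 = us, 2 = ns), as in the diff above.
const sketchConverters = {
  9: (data, numOfRows, nbytes, offset, precision) =>   // 9 = timestamp column (assumed id)
    `decode ${numOfRows} timestamp(s) at precision ${precision}`,
  4: (data, numOfRows, nbytes, offset, precision) =>   // 4 = int column (assumed id)
    `decode ${numOfRows} int(s); precision is simply ignored`,
};

function decodeBlockSketch(fields, numOfRows, precision) {
  // The same precision value is threaded to every converter instead of a bool isMicro.
  return fields.map(f => sketchConverters[f.type](null, numOfRows, 8, 0, precision));
}

console.log(decodeBlockSketch([{ type: 9 }, { type: 4 }], 2, 2)); // e.g. a nanosecond result set
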
@@ -1,5 +1,5 @@
const FieldTypes = require('./constants');

const util = require('util');
/**
 * Various objects such as TaosRow and TaosColumn that help make parsing data easier
 * @module TaosObjects

@@ -42,11 +42,71 @@ function TaosField(field) {
 * @param {Date} date - A Javascript date time object or the time in milliseconds past 1970-1-1 00:00:00.000
 */
class TaosTimestamp extends Date {
  constructor(date, micro = false) {
    super(date);
    this._type = 'TaosTimestamp';
    if (micro) {
      this.microTime = date - Math.floor(date);
  constructor(date, precision = 0) {
    if (precision === 1) {
      super(Math.floor(date / 1000));
      this.precisionExtras = date % 1000;
    } else if (precision === 2) {
      super(parseInt(date / 1000000));
      // use BigInt to fix: 1625801548423914405 % 1000000 = 914496 which not expected (914405)
      this.precisionExtras = parseInt(BigInt(date) % 1000000n);
    } else {
      super(parseInt(date));
    }
    this.precision = precision;
  }

  /**
   * TDengine raw timestamp.
   * @returns raw taos timestamp (int64)
   */
  taosTimestamp() {
    if (this.precision == 1) {
      return (this * 1000 + this.precisionExtras);
    } else if (this.precision == 2) {
      return (this * 1000000 + this.precisionExtras);
    } else {
      return Math.floor(this);
    }
  }

  /**
   * Gets the microseconds of a Date.
   * @return {Int} A microseconds integer
   */
  getMicroseconds() {
    if (this.precision == 1) {
      return this.getMilliseconds() * 1000 + this.precisionExtras;
    } else if (this.precision == 2) {
      return this.getMilliseconds() * 1000 + this.precisionExtras / 1000;
    } else {
      return 0;
    }
  }
  /**
   * Gets the nanoseconds of a TaosTimestamp.
   * @return {Int} A nanoseconds integer
   */
  getNanoseconds() {
    if (this.precision == 1) {
      return this.getMilliseconds() * 1000000 + this.precisionExtras * 1000;
    } else if (this.precision == 2) {
      return this.getMilliseconds() * 1000000 + this.precisionExtras;
    } else {
      return 0;
    }
  }

  /**
   * @returns {String} a string for timestamp string format
   */
  _precisionExtra() {
    if (this.precision == 1) {
      return String(this.precisionExtras).padStart(3, '0');
    } else if (this.precision == 2) {
      return String(this.precisionExtras).padStart(6, '0');
    } else {
      return '';
    }
  }
  /**

@@ -73,7 +133,18 @@ class TaosTimestamp extends Date {
      ':' + pad(this.getMinutes()) +
      ':' + pad(this.getSeconds()) +
      '.' + pad2(this.getMilliseconds()) +
      '' + (this.microTime ? pad2(Math.round(this.microTime * 1000)) : '');
      '' + this._precisionExtra();
  }

  /**
   * Custom console.log
   * @returns {String} string format for debug
   */
  [util.inspect.custom](depth, opts) {
    return this.toTaosString() + JSON.stringify({ precision: this.precision, precisionExtras: this.precisionExtras }, opts);
  }
  toString() {
    return this.toTaosString();
  }
}

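The constructor comment above ("1625801548423914405 % 1000000 = 914496") is about JavaScript Number precision: a nanosecond epoch value exceeds Number.MAX_SAFE_INTEGER, so the remainder has to be taken on a BigInt. A small demonstration of that arithmetic, plus the expected behaviour of the new accessors assuming a TaosTimestamp constructed exactly as in the diff:

// Number arithmetic drifts once the value no longer fits in 53 bits:
console.log(1625801548423914405 % 1000000);             // 914496 (wrong: the literal itself was already rounded)
console.log(BigInt('1625801548423914405') % 1000000n);  // 914405n (exact)

// Assuming the TaosTimestamp class from the diff (precision 0 = ms, 1 = us, 2 = ns):
// const us = new TaosTimestamp(1625801548423914, 1);
// us.taosTimestamp();    // 1625801548423914  (Date value in ms * 1000 + precisionExtras)
// us.getMicroseconds();  // 423914            (ms-of-second * 1000 + precisionExtras)
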
@@ -1,13 +1,13 @@
{
  "name": "td2.0-connector",
  "version": "2.0.8",
  "version": "2.0.9",
  "description": "A Node.js connector for TDengine.",
  "main": "tdengine.js",
  "directories": {
    "test": "test"
  },
  "scripts": {
    "test": "node test/test.js"
    "test": "node test/test.js && node test/testMicroseconds.js && node test/testNanoseconds.js"
  },
  "repository": {
    "type": "git",

@@ -1,4 +1,4 @@
var TDengineConnection = require('./nodetaos/connection.js')
module.exports.connect = function (connection=null) {
module.exports.connect = function (connection={}) {
  return new TDengineConnection(connection);
}

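The default-argument change above (connection=null to connection={}) is presumably what lets the no-argument call used by the new tests, taos.connect(), hand the connection constructor an object it can read optional fields from. A tiny illustration of the general difference; TDengineConnection itself is not shown, and the option names are only examples:

// With a null default, reading options in the callee would throw before any fallback applies:
function connectOld(connection = null) {
  return { host: connection.host || 'localhost' };     // TypeError on connectOld()
}

// With an object default, a bare call simply falls back to the defaults:
function connectNew(connection = {}) {
  return { host: connection.host || 'localhost' };
}

console.log(connectNew());                          // { host: 'localhost' }
console.log(connectNew({ host: 'example.local' })); // { host: 'example.local' }
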
@@ -1,5 +1,5 @@
const taos = require('../tdengine');
var conn = taos.connect({host:"127.0.0.1", user:"root", password:"taosdata", config:"/etc/taos",port:10});
var conn = taos.connect();
var c1 = conn.cursor();
let stime = new Date();
let interval = 1000;

@@ -0,0 +1,49 @@
const taos = require('../tdengine');
var conn = taos.connect();
var c1 = conn.cursor();
let stime = new Date();
let interval = 1000;

function convertDateToTS(date) {
  let tsArr = date.toISOString().split("T")
  return "\"" + tsArr[0] + " " + tsArr[1].substring(0, tsArr[1].length - 1) + "\"";
}
function R(l, r) {
  return Math.random() * (r - l) - r;
}
function randomBool() {
  if (Math.random() < 0.5) {
    return true;
  }
  return false;
}

// Initialize
//c1.execute('drop database td_connector_test;');
const dbname = 'nodejs_test_us';
c1.execute('create database if not exists ' + dbname + ' precision "us"');
c1.execute('use ' + dbname)
c1.execute('create table if not exists tstest (ts timestamp, _int int);');
c1.execute('insert into tstest values(1625801548423914, 0)');
// Select
console.log('select * from tstest');
c1.execute('select * from tstest');

var d = c1.fetchall();
console.log(c1.fields);
let ts = d[0][0];
console.log(ts);

if (ts.taosTimestamp() != 1625801548423914) {
  throw "microseconds not match!";
}
if (ts.getMicroseconds() % 1000 !== 914) {
  throw "micronsecond precision error";
}
setTimeout(function () {
  c1.query('drop database nodejs_us_test;');
}, 200);

setTimeout(function () {
  conn.close();
}, 2000);

@@ -0,0 +1,49 @@
const taos = require('../tdengine');
var conn = taos.connect();
var c1 = conn.cursor();
let stime = new Date();
let interval = 1000;

function convertDateToTS(date) {
  let tsArr = date.toISOString().split("T")
  return "\"" + tsArr[0] + " " + tsArr[1].substring(0, tsArr[1].length - 1) + "\"";
}
function R(l, r) {
  return Math.random() * (r - l) - r;
}
function randomBool() {
  if (Math.random() < 0.5) {
    return true;
  }
  return false;
}

// Initialize
//c1.execute('drop database td_connector_test;');
const dbname = 'nodejs_test_ns';
c1.execute('create database if not exists ' + dbname + ' precision "ns"');
c1.execute('use ' + dbname)
c1.execute('create table if not exists tstest (ts timestamp, _int int);');
c1.execute('insert into tstest values(1625801548423914405, 0)');
// Select
console.log('select * from tstest');
c1.execute('select * from tstest');

var d = c1.fetchall();
console.log(c1.fields);
let ts = d[0][0];
console.log(ts);

if (ts.taosTimestamp() != 1625801548423914405) {
  throw "nanosecond not match!";
}
if (ts.getNanoseconds() % 1000000 !== 914405) {
  throw "nanosecond precision error";
}
setTimeout(function () {
  c1.query('drop database nodejs_ns_test;');
}, 200);

setTimeout(function () {
  conn.close();
}, 2000);

@@ -106,8 +106,7 @@ typedef struct SResultRowInfo {
  int16_t     type:8;     // data type for hash key
  int32_t     size:24;    // number of result set
  int32_t     capacity;   // max capacity
  SResultRow* current;    // current start active index
  int64_t     prevSKey;   // previous (not completed) sliding window start key
  SResultRow* current;    // current active result row
} SResultRowInfo;

typedef struct SColumnFilterElem {

@@ -243,6 +243,7 @@ static void sortGroupResByOrderList(SGroupResInfo *pGroupResInfo, SQueryRuntimeE
  if (size <= 0) {
    return;
  }

  int32_t orderId = pRuntimeEnv->pQueryAttr->order.orderColId;
  if (orderId <= 0) {
    return;

@@ -411,21 +412,6 @@ static void prepareResultListBuffer(SResultRowInfo* pResultRowInfo, SQueryRuntim
  pResultRowInfo->capacity = (int32_t)newCapacity;
}

//static int32_t ascResultRowCompareFn(const void* p1, const void* p2) {
//  SResultRow* pRow1 = *(SResultRow**)p1;
//  SResultRow* pRow2 = *(SResultRow**)p2;
//
//  if (pRow1 == pRow2) {
//    return 0;
//  } else {
//    return pRow1->win.skey < pRow2->win.skey? -1:1;
//  }
//}

//static int32_t descResultRowCompareFn(const void* p1, const void* p2) {
//  return -ascResultRowCompareFn(p1, p2);
//}

static SResultRow *doPrepareResultRowFromKey(SQueryRuntimeEnv *pRuntimeEnv, SResultRowInfo *pResultRowInfo, int64_t tid, char *pData,
                                             int16_t bytes, bool masterscan, uint64_t tableGroupId) {
  bool existed = false;

@@ -451,11 +437,6 @@ static SResultRow *doPrepareResultRowFromKey(SQueryRuntimeEnv *pRuntimeEnv, SRes
        SET_RES_WINDOW_KEY(pRuntimeEnv->keyBuf, pData, bytes, tid);
        void* ptr = taosHashGet(pRuntimeEnv->pResultRowListSet, pRuntimeEnv->keyBuf, GET_RES_WINDOW_KEY_LEN(bytes));
        existed = (ptr != NULL);
//        __compar_fn_t fn = QUERY_IS_ASC_QUERY(pRuntimeEnv->pQueryAttr)? ascResultRowCompareFn:descResultRowCompareFn;
//        void* ptr = taosbsearch(p1, pResultRowInfo->pResult, pResultRowInfo->size, POINTER_BYTES, fn, TD_EQ);
//        if (ptr != NULL) {
//          existed = true;
//        }
      }
    }
  } else {

@@ -527,12 +508,12 @@ static STimeWindow getActiveTimeWindow(SResultRowInfo * pResultRowInfo, int64_t
  STimeWindow w = {0};

  if (pResultRowInfo->current == NULL) {  // the first window, from the previous stored value
    if (pResultRowInfo->prevSKey == TSKEY_INITIAL_VAL) {
//    if (pResultRowInfo->prevSKey == TSKEY_INITIAL_VAL) {
      getInitialStartTimeWindow(pQueryAttr, ts, &w);
      pResultRowInfo->prevSKey = w.skey;
    } else {
      w.skey = pResultRowInfo->prevSKey;
    }
//      pResultRowInfo->prevSKey = w.skey;
//    } else {
//      w.skey = pResultRowInfo->prevSKey;
//    }

    if (pQueryAttr->interval.intervalUnit == 'n' || pQueryAttr->interval.intervalUnit == 'y') {
      w.ekey = taosTimeAdd(w.skey, pQueryAttr->interval.interval, pQueryAttr->interval.intervalUnit, pQueryAttr->precision) - 1;

@@ -540,10 +521,7 @@ static STimeWindow getActiveTimeWindow(SResultRowInfo * pResultRowInfo, int64_t
      w.ekey = w.skey + pQueryAttr->interval.interval - 1;
    }
  } else {
//    int32_t slot = curTimeWindowIndex(pResultRowInfo);
//    SResultRow* pWindowRes = getResultRow(pResultRowInfo, slot);
    SResultRow* pWindowRes = pResultRowInfo->current;
    w = pWindowRes->win;
    w = pResultRowInfo->current->win;
  }

  if (w.skey > ts || w.ekey < ts) {

@@ -748,8 +726,6 @@ static void doUpdateResultRowIndex(SResultRowInfo*pResultRowInfo, TSKEY lastKey,
    } else {
      pResultRowInfo->current = pResultRowInfo->pResult[i + 1];  // current not closed result object
    }

    pResultRowInfo->prevSKey = pResultRowInfo->current->win.skey;
  }
}

@@ -1368,7 +1344,6 @@ static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResul
  bool ascQuery = QUERY_IS_ASC_QUERY(pQueryAttr);

  SResultRow* prevRow = pResultRowInfo->current;
//  int32_t prevIndex = curTimeWindowIndex(pResultRowInfo);

  TSKEY* tsCols = NULL;
  if (pSDataBlock->pDataBlock != NULL) {

@@ -1406,7 +1381,8 @@ static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResul
      j++;
    }

    for(; pResultRowInfo->pResult[j] != pResultRowInfo->current; ++j) {
    SResultRow* current = pResultRowInfo->current;
    for(; pResultRowInfo->pResult[j] != current && j < pResultRowInfo->size; ++j) {
      SResultRow* pRes = pResultRowInfo->pResult[j];
      if (pRes->closed) {
        assert(resultRowInterpolated(pRes, RESULT_ROW_START_INTERP) && resultRowInterpolated(pRes, RESULT_ROW_END_INTERP));

@@ -1414,16 +1390,16 @@ static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResul
      }

      STimeWindow w = pRes->win;
      ret = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, pSDataBlock->info.tid, &w, masterScan, &pResult, tableGroupId, pInfo->pCtx,
                                    numOfOutput, pInfo->rowCellInfoOffset);
      ret = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, pSDataBlock->info.tid, &w, masterScan, &pResult,
                                    tableGroupId, pInfo->pCtx, numOfOutput, pInfo->rowCellInfoOffset);
      if (ret != TSDB_CODE_SUCCESS) {
        longjmp(pRuntimeEnv->env, TSDB_CODE_QRY_OUT_OF_MEMORY);
      }

      assert(!resultRowInterpolated(pResult, RESULT_ROW_END_INTERP));

      doTimeWindowInterpolation(pOperatorInfo, pInfo, pSDataBlock->pDataBlock, *(TSKEY *)pRuntimeEnv->prevRow[0],
                                -1, tsCols[startPos], startPos, w.ekey, RESULT_ROW_END_INTERP);
      doTimeWindowInterpolation(pOperatorInfo, pInfo, pSDataBlock->pDataBlock, *(TSKEY*)pRuntimeEnv->prevRow[0], -1,
                                tsCols[startPos], startPos, w.ekey, RESULT_ROW_END_INTERP);

      setResultRowInterpo(pResult, RESULT_ROW_END_INTERP);
      setNotInterpoWindowKey(pInfo->pCtx, pQueryAttr->numOfOutput, RESULT_ROW_START_INTERP);

@@ -3844,12 +3820,16 @@ void setParamForStableStddevByColData(SQueryRuntimeEnv* pRuntimeEnv, SQLFunction
void setIntervalQueryRange(SQueryRuntimeEnv *pRuntimeEnv, TSKEY key) {
  SQueryAttr *pQueryAttr = pRuntimeEnv->pQueryAttr;
  STableQueryInfo *pTableQueryInfo = pRuntimeEnv->current;
  SResultRowInfo *pWindowResInfo = &pTableQueryInfo->resInfo;
  SResultRowInfo *pResultRowInfo = &pTableQueryInfo->resInfo;

  if (pWindowResInfo->prevSKey != TSKEY_INITIAL_VAL) {
  if (pResultRowInfo->current != NULL) {
    return;
  }

//  if (pWindowResInfo->prevSKey != TSKEY_INITIAL_VAL) {
//    return;
//  }

  pTableQueryInfo->win.skey = key;
  STimeWindow win = {.skey = key, .ekey = pQueryAttr->window.ekey};

@@ -3865,13 +3845,13 @@ void setIntervalQueryRange(SQueryRuntimeEnv *pRuntimeEnv, TSKEY key) {
  TSKEY ek = MAX(win.skey, win.ekey);
  getAlignQueryTimeWindow(pQueryAttr, win.skey, sk, ek, &w);

  if (pWindowResInfo->prevSKey == TSKEY_INITIAL_VAL) {
    if (!QUERY_IS_ASC_QUERY(pQueryAttr)) {
      assert(win.ekey == pQueryAttr->window.ekey);
    }

    pWindowResInfo->prevSKey = w.skey;
  }
//  if (pResultRowInfo->prevSKey == TSKEY_INITIAL_VAL) {
//    if (!QUERY_IS_ASC_QUERY(pQueryAttr)) {
//      assert(win.ekey == pQueryAttr->window.ekey);
//    }
//
//    pResultRowInfo->prevSKey = w.skey;
//  }

  pTableQueryInfo->lastKey = pTableQueryInfo->win.skey;
}

@@ -4772,7 +4752,7 @@ static SSDataBlock* doTableScan(void* param, bool *newgroup) {

    if (pResultRowInfo->size > 0) {
      pResultRowInfo->current = pResultRowInfo->pResult[0];
      pResultRowInfo->prevSKey = pResultRowInfo->pResult[0]->win.skey;
//      pResultRowInfo->prevSKey = pResultRowInfo->pResult[0]->win.skey;
    }

    qDebug("QInfo:0x%"PRIx64" start to repeat scan data blocks due to query func required, qrange:%" PRId64 "-%" PRId64,

@@ -4798,7 +4778,7 @@ static SSDataBlock* doTableScan(void* param, bool *newgroup) {

    if (pResultRowInfo->size > 0) {
      pResultRowInfo->current = pResultRowInfo->pResult[pResultRowInfo->size - 1];
      pResultRowInfo->prevSKey = pResultRowInfo->current->win.skey;
//      pResultRowInfo->prevSKey = pResultRowInfo->current->win.skey;
    }

    p = doTableScanImpl(pOperator, newgroup);

@@ -44,7 +44,7 @@ int32_t getOutputInterResultBufSize(SQueryAttr* pQueryAttr) {
int32_t initResultRowInfo(SResultRowInfo *pResultRowInfo, int32_t size, int16_t type) {
  pResultRowInfo->type = type;
  pResultRowInfo->size = 0;
  pResultRowInfo->prevSKey = TSKEY_INITIAL_VAL;
//  pResultRowInfo->prevSKey = TSKEY_INITIAL_VAL;
  pResultRowInfo->current = NULL;
  pResultRowInfo->capacity = size;

@@ -93,7 +93,7 @@ void resetResultRowInfo(SQueryRuntimeEnv *pRuntimeEnv, SResultRowInfo *pResultRo

  pResultRowInfo->size = 0;
  pResultRowInfo->current = NULL;
  pResultRowInfo->prevSKey = TSKEY_INITIAL_VAL;
//  pResultRowInfo->prevSKey = TSKEY_INITIAL_VAL;
}

int32_t numOfClosedResultRows(SResultRowInfo *pResultRowInfo) {

@@ -450,4 +450,44 @@ if $data11 != 1 then
  return -1
endi

print =====================>TD-5157
sql select twa(c1) from nest_tb1 interval(19a);
if $rows != 10000 then
  return -1
endi

if $data00 != @20-09-14 23:59:59.992@ then
  return -1
endi

if $data01 != 0.000083333 then
  return -1
endi

print =================>us database interval query, TD-5039
sql create database test precision 'us';
sql use test;
sql create table t1(ts timestamp, k int);
sql insert into t1 values('2020-01-01 01:01:01.000', 1) ('2020-01-01 01:02:00.000', 2);
sql select avg(k) from (select avg(k) k from t1 interval(1s)) interval(1m);
if $rows != 2 then
  return -1
endi

if $data00 != @20-01-01 01:01:00.000000@ then
  return -1
endi

if $data01 != 1.000000000 then
  return -1
endi

if $data10 != @20-01-01 01:02:00.000000@ then
  return -1
endi

if $data11 != 2.000000000 then
  return -1
endi

system sh/exec.sh -n dnode1 -s stop -x SIGINT