diff --git a/cmake/install.inc b/cmake/install.inc
index 746e493a17..4b2d4828f8 100755
--- a/cmake/install.inc
+++ b/cmake/install.inc
@@ -32,7 +32,7 @@ ELSEIF (TD_WINDOWS)
#INSTALL(TARGETS taos RUNTIME DESTINATION driver)
#INSTALL(TARGETS shell RUNTIME DESTINATION .)
IF (TD_MVN_INSTALLED)
- INSTALL(FILES ${LIBRARY_OUTPUT_PATH}/taos-jdbcdriver-2.0.8-dist.jar DESTINATION connector/jdbc)
+ INSTALL(FILES ${LIBRARY_OUTPUT_PATH}/taos-jdbcdriver-2.0.13-dist.jar DESTINATION connector/jdbc)
ENDIF ()
ELSEIF (TD_DARWIN)
SET(TD_MAKE_INSTALL_SH "${TD_COMMUNITY_DIR}/packaging/tools/make_install.sh")
diff --git a/src/connector/jdbc/CMakeLists.txt b/src/connector/jdbc/CMakeLists.txt
index c565853ab0..701a39b209 100644
--- a/src/connector/jdbc/CMakeLists.txt
+++ b/src/connector/jdbc/CMakeLists.txt
@@ -8,7 +8,7 @@ IF (TD_MVN_INSTALLED)
ADD_CUSTOM_COMMAND(OUTPUT ${JDBC_CMD_NAME}
POST_BUILD
COMMAND mvn -Dmaven.test.skip=true install -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml
- COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/target/taos-jdbcdriver-2.0.8-dist.jar ${LIBRARY_OUTPUT_PATH}
+ COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/target/taos-jdbcdriver-2.0.13-dist.jar ${LIBRARY_OUTPUT_PATH}
COMMAND mvn -Dmaven.test.skip=true clean -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml
COMMENT "build jdbc driver")
ADD_CUSTOM_TARGET(${JDBC_TARGET_NAME} ALL WORKING_DIRECTORY ${EXECUTABLE_OUTPUT_PATH} DEPENDS ${JDBC_CMD_NAME})
diff --git a/src/connector/jdbc/deploy-pom.xml b/src/connector/jdbc/deploy-pom.xml
index 51db837c7b..1dc2625e62 100755
--- a/src/connector/jdbc/deploy-pom.xml
+++ b/src/connector/jdbc/deploy-pom.xml
@@ -5,7 +5,7 @@
com.taosdata.jdbc
taos-jdbcdriver
- 2.0.10
+ 2.0.13
jar
JDBCDriver
@@ -49,17 +49,29 @@
-
- org.apache.commons
- commons-lang3
- 3.5
-
junit
junit
4.13
test
+
+
+
+ org.apache.httpcomponents
+ httpclient
+ 4.5.8
+
+
+ org.apache.commons
+ commons-lang3
+ 3.9
+
+
+ com.alibaba
+ fastjson
+ 1.2.58
+
diff --git a/src/connector/jdbc/pom.xml b/src/connector/jdbc/pom.xml
index e7124a0599..3d1f402435 100755
--- a/src/connector/jdbc/pom.xml
+++ b/src/connector/jdbc/pom.xml
@@ -3,7 +3,7 @@
4.0.0
com.taosdata.jdbc
taos-jdbcdriver
- 2.0.8
+ 2.0.13
jar
JDBCDriver
https://github.com/taosdata/TDengine/tree/master/src/connector/jdbc
@@ -112,6 +112,13 @@
maven-surefire-plugin
2.12.4
+
+ **/*Test.java
+
+
+ **/BatchInsertTest.java
+ **/FailOverTest.java
+
true
diff --git a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulConnection.java b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulConnection.java
index b82efca3ef..6b0937a9b7 100644
--- a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulConnection.java
+++ b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulConnection.java
@@ -15,7 +15,6 @@ public class RestfulConnection implements Connection {
private final String database;
private final String url;
-
public RestfulConnection(String host, String port, Properties props, String database, String url) {
this.host = host;
this.port = Integer.parseInt(port);
@@ -28,7 +27,7 @@ public class RestfulConnection implements Connection {
public Statement createStatement() throws SQLException {
if (isClosed())
throw new SQLException(TSDBConstants.WrapErrMsg("restful TDengine connection is closed."));
- return new RestfulStatement(this, this.database);
+ return new RestfulStatement(this, database);
}
@Override
@@ -104,22 +103,28 @@ public class RestfulConnection implements Connection {
@Override
public void setTransactionIsolation(int level) throws SQLException {
-
+ //transaction is not supported
+ throw new SQLFeatureNotSupportedException("transactions are not supported");
}
+ /**
+ *
+ */
@Override
public int getTransactionIsolation() throws SQLException {
- return 0;
+ //Connection.TRANSACTION_NONE specifies that transactions are not supported.
+ return Connection.TRANSACTION_NONE;
}
@Override
public SQLWarning getWarnings() throws SQLException {
+ //TODO: getWarnings not implemented
return null;
}
@Override
public void clearWarnings() throws SQLException {
-
+ throw new SQLFeatureNotSupportedException("clearWarnings not supported.");
}
@Override
@@ -209,22 +214,26 @@ public class RestfulConnection implements Connection {
@Override
public Clob createClob() throws SQLException {
- return null;
+ //TODO: not supported
+ throw new SQLFeatureNotSupportedException();
}
@Override
public Blob createBlob() throws SQLException {
- return null;
+ //TODO: not supported
+ throw new SQLFeatureNotSupportedException();
}
@Override
public NClob createNClob() throws SQLException {
- return null;
+ //TODO: not supported
+ throw new SQLFeatureNotSupportedException();
}
@Override
public SQLXML createSQLXML() throws SQLException {
- return null;
+ //TODO: not supported
+ throw new SQLFeatureNotSupportedException();
}
@Override
@@ -254,12 +263,14 @@ public class RestfulConnection implements Connection {
@Override
public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
- return null;
+ //TODO: not supported
+ throw new SQLFeatureNotSupportedException();
}
@Override
public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
- return null;
+ //TODO: not supported
+ throw new SQLFeatureNotSupportedException();
}
@Override
@@ -289,12 +300,16 @@ public class RestfulConnection implements Connection {
@Override
public T unwrap(Class iface) throws SQLException {
- return null;
+ try {
+ return iface.cast(this);
+ } catch (ClassCastException cce) {
+ throw new SQLException("Unable to unwrap to " + iface.toString());
+ }
}
@Override
public boolean isWrapperFor(Class> iface) throws SQLException {
- return false;
+ return iface.isInstance(this);
}
public String getHost() {
diff --git a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulDriver.java b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulDriver.java
index c267f660de..9e87cfa680 100644
--- a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulDriver.java
+++ b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulDriver.java
@@ -35,7 +35,7 @@ public class RestfulDriver extends AbstractTaosDriver {
Properties props = parseURL(url, info);
String host = props.getProperty(TSDBDriver.PROPERTY_KEY_HOST, "localhost");
String port = props.getProperty(TSDBDriver.PROPERTY_KEY_PORT, "6041");
- String database = props.getProperty(TSDBDriver.PROPERTY_KEY_DBNAME);
+ String database = props.containsKey(TSDBDriver.PROPERTY_KEY_DBNAME) ? props.getProperty(TSDBDriver.PROPERTY_KEY_DBNAME) : null;
String loginUrl = "http://" + props.getProperty(TSDBDriver.PROPERTY_KEY_HOST) + ":"
+ props.getProperty(TSDBDriver.PROPERTY_KEY_PORT) + "/rest/login/"
@@ -86,6 +86,7 @@ public class RestfulDriver extends AbstractTaosDriver {
@Override
public Logger getParentLogger() throws SQLFeatureNotSupportedException {
- return null;
+ //TODO SQLFeatureNotSupportedException
+ throw new SQLFeatureNotSupportedException();
}
}
diff --git a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulStatement.java b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulStatement.java
index 20510f0135..30b56638d8 100644
--- a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulStatement.java
+++ b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulStatement.java
@@ -4,6 +4,7 @@ import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.taosdata.jdbc.TSDBConstants;
import com.taosdata.jdbc.rs.util.HttpClientPoolUtil;
+import com.taosdata.jdbc.utils.SqlSyntaxValidator;
import java.sql.*;
import java.util.Arrays;
@@ -11,19 +12,23 @@ import java.util.List;
public class RestfulStatement implements Statement {
- private final String catalog;
+ private boolean closed;
+ private String database;
private final RestfulConnection conn;
- public RestfulStatement(RestfulConnection c, String catalog) {
+ public RestfulStatement(RestfulConnection c, String database) {
this.conn = c;
- this.catalog = catalog;
+ this.database = database;
}
@Override
public ResultSet executeQuery(String sql) throws SQLException {
+ if (isClosed())
+ throw new SQLException("statement already closed");
+ if (!SqlSyntaxValidator.isSelectSql(sql))
+ throw new SQLException("not a select sql for executeQuery: " + sql);
- final String url = "http://" + conn.getHost() + ":"+conn.getPort()+"/rest/sql";
-
+ final String url = "http://" + conn.getHost() + ":" + conn.getPort() + "/rest/sql";
String result = HttpClientPoolUtil.execute(url, sql);
String fields = "";
List words = Arrays.asList(sql.split(" "));
@@ -65,12 +70,29 @@ public class RestfulStatement implements Statement {
@Override
public int executeUpdate(String sql) throws SQLException {
- return 0;
+ if (isClosed())
+ throw new SQLException("statement already closed");
+ if (!SqlSyntaxValidator.isValidForExecuteUpdate(sql))
+ throw new SQLException("not a valid sql for executeUpdate: " + sql);
+
+ if (this.database == null)
+ throw new SQLException("Database not specified or available");
+
+ final String url = "http://" + conn.getHost() + ":" + conn.getPort() + "/rest/sql";
+ HttpClientPoolUtil.execute(url, "use " + this.database);
+ String result = HttpClientPoolUtil.execute(url, sql);
+ JSONObject jsonObject = JSON.parseObject(result);
+ if (jsonObject.getString("status").equals("error")) {
+ throw new SQLException(TSDBConstants.WrapErrMsg("SQL execution error: " +
+ jsonObject.getString("desc") + "\n" +
+ "error code: " + jsonObject.getString("code")));
+ }
+ return Integer.parseInt(jsonObject.getString("rows"));
}
@Override
public void close() throws SQLException {
-
+ this.closed = true;
}
@Override
@@ -115,6 +137,7 @@ public class RestfulStatement implements Statement {
@Override
public SQLWarning getWarnings() throws SQLException {
+ //TODO: getWarnings not Implemented
return null;
}
@@ -130,7 +153,29 @@ public class RestfulStatement implements Statement {
@Override
public boolean execute(String sql) throws SQLException {
- return false;
+ if (isClosed()) {
+ throw new SQLException("Invalid method call on a closed statement.");
+ }
+ //如果执行了use操作应该将当前Statement的catalog设置为新的database
+ if (SqlSyntaxValidator.isUseSql(sql)) {
+ this.database = sql.trim().replace("use", "").trim();
+ }
+ if (this.database == null)
+ throw new SQLException("Database not specified or available");
+
+ final String url = "http://" + conn.getHost() + ":" + conn.getPort() + "/rest/sql";
+ // use database
+ HttpClientPoolUtil.execute(url, "use " + this.database);
+ // execute sql
+ String result = HttpClientPoolUtil.execute(url, sql);
+ // parse result
+ JSONObject jsonObject = JSON.parseObject(result);
+ if (jsonObject.getString("status").equals("error")) {
+ throw new SQLException(TSDBConstants.WrapErrMsg("SQL execution error: " +
+ jsonObject.getString("desc") + "\n" +
+ "error code: " + jsonObject.getString("code")));
+ }
+ return true;
}
@Override
@@ -245,7 +290,7 @@ public class RestfulStatement implements Statement {
@Override
public boolean isClosed() throws SQLException {
- return false;
+ return closed;
}
@Override
@@ -270,11 +315,15 @@ public class RestfulStatement implements Statement {
@Override
public T unwrap(Class iface) throws SQLException {
- return null;
+ try {
+ return iface.cast(this);
+ } catch (ClassCastException cce) {
+ throw new SQLException("Unable to unwrap to " + iface.toString());
+ }
}
@Override
public boolean isWrapperFor(Class> iface) throws SQLException {
- return false;
+ return iface.isInstance(this);
}
}
diff --git a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/utils/SqlSyntaxValidator.java b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/utils/SqlSyntaxValidator.java
index 066dfad5d5..388c3978be 100644
--- a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/utils/SqlSyntaxValidator.java
+++ b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/utils/SqlSyntaxValidator.java
@@ -22,6 +22,9 @@ import java.sql.SQLException;
public class SqlSyntaxValidator {
+ private static final String[] updateSQL = {"insert", "update", "delete", "create", "alter", "drop", "show", "describe", "use"};
+ private static final String[] querySQL = {"select"};
+
private TSDBConnection tsdbConnection;
public SqlSyntaxValidator(Connection connection) {
@@ -34,7 +37,7 @@ public class SqlSyntaxValidator {
if (tsdbConnection == null || tsdbConnection.isClosed()) {
throw new SQLException("invalid connection");
} else {
- TSDBJNIConnector jniConnector = tsdbConnection.getConnection();
+ TSDBJNIConnector jniConnector = tsdbConnection.getConnection();
if (jniConnector == null) {
throw new SQLException("jniConnector is null");
} else {
@@ -43,4 +46,28 @@ public class SqlSyntaxValidator {
}
return res;
}
+
+ public static boolean isValidForExecuteUpdate(String sql) {
+ for (String prefix : updateSQL) {
+ if (sql.trim().toLowerCase().startsWith(prefix))
+ return true;
+ }
+ return false;
+ }
+
+ public static boolean isUseSql(String sql) {
+ return sql.trim().toLowerCase().startsWith(updateSQL[8]) || sql.trim().toLowerCase().matches("create\\s+database.*") || sql.trim().toLowerCase().matches("drop\\s+database.*");
+ }
+
+ public static boolean isUpdateSql(String sql) {
+ return sql.trim().toLowerCase().startsWith(updateSQL[1]);
+ }
+
+ public static boolean isInsertSql(String sql) {
+ return sql.trim().toLowerCase().startsWith(updateSQL[0]);
+ }
+
+ public static boolean isSelectSql(String sql) {
+ return sql.trim().toLowerCase().startsWith(querySQL[0]);
+ }
}
diff --git a/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulDriverTest.java b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulDriverTest.java
index a91d1c2d6b..d07a6a2179 100644
--- a/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulDriverTest.java
+++ b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulDriverTest.java
@@ -8,33 +8,43 @@ import java.sql.*;
public class RestfulDriverTest {
@Test
- public void testCase001() {
- try {
- Class.forName("com.taosdata.jdbc.rs.RestfulDriver");
- Connection connection = DriverManager.getConnection("jdbc:TAOS-RS://master:6041/?user=root&password=taosdata");
- Statement statement = connection.createStatement();
- ResultSet resultSet = statement.executeQuery("select * from log.log");
- ResultSetMetaData metaData = resultSet.getMetaData();
- while (resultSet.next()) {
- for (int i = 1; i <= metaData.getColumnCount(); i++) {
- String column = metaData.getColumnLabel(i);
- String value = resultSet.getString(i);
- System.out.print(column + ":" + value + "\t");
- }
- System.out.println();
- }
- statement.close();
- connection.close();
- } catch (SQLException | ClassNotFoundException e) {
- e.printStackTrace();
- }
+ public void connect() {
+
}
@Test
- public void testAcceptUrl() throws SQLException {
+ public void acceptsURL() throws SQLException {
Driver driver = new RestfulDriver();
boolean isAccept = driver.acceptsURL("jdbc:TAOS-RS://master:6041");
Assert.assertTrue(isAccept);
+ isAccept = driver.acceptsURL("jdbc:TAOS://master:6041");
+ Assert.assertFalse(isAccept);
}
+ @Test
+ public void getPropertyInfo() throws SQLException {
+ Driver driver = new RestfulDriver();
+ final String url = "";
+ DriverPropertyInfo[] propertyInfo = driver.getPropertyInfo(url, null);
+ }
+
+ @Test
+ public void getMajorVersion() {
+ Assert.assertEquals(2, new RestfulDriver().getMajorVersion());
+ }
+
+ @Test
+ public void getMinorVersion() {
+ Assert.assertEquals(0, new RestfulDriver().getMinorVersion());
+ }
+
+ @Test
+ public void jdbcCompliant() {
+ Assert.assertFalse(new RestfulDriver().jdbcCompliant());
+ }
+
+ @Test(expected = SQLFeatureNotSupportedException.class)
+ public void getParentLogger() throws SQLFeatureNotSupportedException {
+ new RestfulDriver().getParentLogger();
+ }
}
diff --git a/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulJDBCTest.java b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulJDBCTest.java
new file mode 100644
index 0000000000..d13475b96d
--- /dev/null
+++ b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulJDBCTest.java
@@ -0,0 +1,108 @@
+package com.taosdata.jdbc.rs;
+
+import org.junit.*;
+import org.junit.runners.MethodSorters;
+
+import java.sql.*;
+import java.util.Random;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class RestfulJDBCTest {
+
+ private Connection connection;
+
+ @Before
+ public void before() throws ClassNotFoundException, SQLException {
+ Class.forName("com.taosdata.jdbc.rs.RestfulDriver");
+ connection = DriverManager.getConnection("jdbc:TAOS-RS://master:6041/restful_test?user=root&password=taosdata");
+ }
+
+ @After
+ public void after() throws SQLException {
+ if (connection != null)
+ connection.close();
+ }
+
+
+ /**
+ * 查询所有log.log
+ **/
+ @Test
+ public void testCase001() {
+ try {
+ Statement statement = connection.createStatement();
+ ResultSet resultSet = statement.executeQuery("select * from log.log");
+ ResultSetMetaData metaData = resultSet.getMetaData();
+ while (resultSet.next()) {
+ for (int i = 1; i <= metaData.getColumnCount(); i++) {
+ String column = metaData.getColumnLabel(i);
+ String value = resultSet.getString(i);
+ System.out.print(column + ":" + value + "\t");
+ }
+ System.out.println();
+ }
+ statement.close();
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * create database
+ */
+ @Test
+ public void testCase002() {
+ try (Statement stmt = connection.createStatement()) {
+ stmt.execute("drop database if exists restful_test");
+ stmt.execute("create database if not exists restful_test");
+ stmt.execute("use restful_test");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * create super table
+ ***/
+ @Test
+ public void testCase003() {
+ try (Statement stmt = connection.createStatement()) {
+ stmt.execute("create table weather(ts timestamp, temperature float, humidity int) tags(location nchar(64), groupId int)");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Test
+ public void testCase004() {
+ try (Statement stmt = connection.createStatement()) {
+ for (int i = 1; i <= 100; i++) {
+ stmt.execute("create table t" + i + " using weather tags('beijing', '" + i + "')");
+ }
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private Random random = new Random(System.currentTimeMillis());
+
+ @Test
+ public void testCase005() {
+ try (Statement stmt = connection.createStatement()) {
+ int rows = 0;
+ for (int i = 0; i < 10; i++) {
+ for (int j = 1; j <= 100; j++) {
+ long currentTimeMillis = System.currentTimeMillis();
+ int affectRows = stmt.executeUpdate("insert into t" + j + " values(" + currentTimeMillis + "," + (random.nextFloat() * 50) + "," + random.nextInt(100) + ")");
+ Assert.assertEquals(1, affectRows);
+ rows += affectRows;
+ }
+ }
+ Assert.assertEquals(1000, rows);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+ }
+
+
+}
diff --git a/src/connector/jdbc/src/test/java/com/taosdata/jdbc/utils/SqlSyntaxValidatorTest.java b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/utils/SqlSyntaxValidatorTest.java
new file mode 100644
index 0000000000..fb570e16c4
--- /dev/null
+++ b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/utils/SqlSyntaxValidatorTest.java
@@ -0,0 +1,28 @@
+package com.taosdata.jdbc.utils;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class SqlSyntaxValidatorTest {
+
+ @Test
+ public void validateSqlSyntax() {
+ }
+
+ @Test
+ public void isSelectSQL() {
+ Assert.assertTrue(SqlSyntaxValidator.isSelectSql("select * from test.weather"));
+ Assert.assertTrue(SqlSyntaxValidator.isSelectSql(" select * from test.weather"));
+ Assert.assertTrue(SqlSyntaxValidator.isSelectSql(" select * from test.weather "));
+ Assert.assertFalse(SqlSyntaxValidator.isSelectSql("insert into test.weather values(now, 1.1, 2)"));
+ }
+
+ @Test
+ public void isUseSQL() {
+ Assert.assertTrue(SqlSyntaxValidator.isUseSql("use database test"));
+ Assert.assertTrue(SqlSyntaxValidator.isUseSql("create database test"));
+ Assert.assertTrue(SqlSyntaxValidator.isUseSql("create database if not exists test"));
+ Assert.assertTrue(SqlSyntaxValidator.isUseSql("drop database test"));
+ Assert.assertTrue(SqlSyntaxValidator.isUseSql("drop database if exists test"));
+ }
+}
\ No newline at end of file
diff --git a/src/connector/python/linux/python2/setup.py b/src/connector/python/linux/python2/setup.py
index 2e4f80b8f0..b3daa98bdc 100644
--- a/src/connector/python/linux/python2/setup.py
+++ b/src/connector/python/linux/python2/setup.py
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
setuptools.setup(
name="taos",
- version="2.0.0",
+ version="2.0.2",
author="Taosdata Inc.",
author_email="support@taosdata.com",
description="TDengine python client package",
diff --git a/src/connector/python/linux/python2/taos/cinterface.py b/src/connector/python/linux/python2/taos/cinterface.py
index 269326535c..6f0435722f 100644
--- a/src/connector/python/linux/python2/taos/cinterface.py
+++ b/src/connector/python/linux/python2/taos/cinterface.py
@@ -18,7 +18,7 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
_timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0:
- return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)][::1]))
+ return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
else:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
@@ -26,7 +26,7 @@ def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)] ]
@@ -34,7 +34,7 @@ def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
@@ -42,7 +42,7 @@ def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)][::1]]
+ return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
else:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)] ]
@@ -50,7 +50,7 @@ def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
@@ -58,7 +58,7 @@ def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ]
@@ -66,7 +66,7 @@ def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row
"""
if num_of_rows > 0:
- return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)][::1] ]
+ return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
@@ -74,7 +74,7 @@ def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row
"""
if num_of_rows > 0:
- return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)][::1] ]
+ return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
@@ -82,7 +82,7 @@ def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
if num_of_rows > 0:
- return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)][::1]]
+ return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
@@ -90,9 +90,7 @@ def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row
"""
assert(nbytes is not None)
-
- res = []
-
+ res=[]
for i in range(abs(num_of_rows)):
try:
if num_of_rows >= 0:
@@ -103,17 +101,49 @@ def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
except ValueError:
res.append(None)
+ return res
+
+def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
+ """Function to convert C binary row to python row
+ """
+ res=[]
+ if num_of_rows > 0:
+ for i in range(abs(num_of_rows)):
+ try:
+ rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode()[0:rbyte] )
+ except ValueError:
+ res.append(None)
+ else:
+ for i in range(abs(num_of_rows)):
+ try:
+ rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode()[0:rbyte] )
+ except ValueError:
+ res.append(None)
+ return res
+
+def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
+ """Function to convert C nchar row to python row
+ """
+ assert(nbytes is not None)
+ res=[]
+ if num_of_rows >= 0:
+ for i in range(abs(num_of_rows)):
+ try:
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode() )
+ except ValueError:
+ res.append(None)
+ else:
+ for i in range(abs(num_of_rows)):
+ try:
+ res.append( (ctypes.cast(data+nbytes*i+2, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
+ except ValueError:
+ res.append(None)
return res
- # if num_of_rows > 0:
- # for i in range(abs(num_of_rows)):
- # try:
- # res.append( (ctypes.cast(data+nbytes*i, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
- # except ValueError:
- # res.append(None)
- # return res
- # # return [ele.value for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[:abs(num_of_rows)][::-1]]
- # else:
- # return [ele.value for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[:abs(num_of_rows)]]
_CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python,
@@ -128,6 +158,19 @@ _CONVERT_FUNC = {
FieldType.C_NCHAR : _crow_nchar_to_python
}
+_CONVERT_FUNC_BLOCK = {
+ FieldType.C_BOOL: _crow_bool_to_python,
+ FieldType.C_TINYINT : _crow_tinyint_to_python,
+ FieldType.C_SMALLINT : _crow_smallint_to_python,
+ FieldType.C_INT : _crow_int_to_python,
+ FieldType.C_BIGINT : _crow_bigint_to_python,
+ FieldType.C_FLOAT : _crow_float_to_python,
+ FieldType.C_DOUBLE : _crow_double_to_python,
+ FieldType.C_BINARY: _crow_binary_to_python_block,
+ FieldType.C_TIMESTAMP : _crow_timestamp_to_python,
+ FieldType.C_NCHAR : _crow_nchar_to_python_block
+}
+
# Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65),
@@ -227,8 +270,8 @@ class CTaosInterface(object):
print('connect to TDengine failed')
raise ConnectionError("connect to TDengine failed")
# sys.exit(1)
- else:
- print('connect to TDengine success')
+ #else:
+ # print('connect to TDengine success')
return connection
@@ -237,7 +280,7 @@ class CTaosInterface(object):
'''Close the TDengine handle
'''
CTaosInterface.libtaos.taos_close(connection)
- print('connection is closed')
+ #print('connection is closed')
@staticmethod
def query(connection, sql):
@@ -310,6 +353,24 @@ class CTaosInterface(object):
@staticmethod
def fetchBlock(result, fields):
+ pblock = ctypes.c_void_p(0)
+ num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
+ result, ctypes.byref(pblock))
+ if num_of_rows == 0:
+ return None, 0
+ isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO)
+ blocks = [None] * len(fields)
+ fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
+ fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
+ for i in range(len(fields)):
+ data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
+ if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
+ raise DatabaseError("Invalid data type returned from database")
+ blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro)
+
+ return blocks, abs(num_of_rows)
+ @staticmethod
+ def fetchRow(result, fields):
pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock :
@@ -393,7 +454,7 @@ class CTaosInterface(object):
def errStr(result):
"""Return the error styring
"""
- return CTaosInterface.libtaos.taos_errstr(result)
+ return CTaosInterface.libtaos.taos_errstr(result).decode('utf-8')
if __name__ == '__main__':
@@ -410,4 +471,4 @@ if __name__ == '__main__':
print(data)
cinter.freeResult(result)
- cinter.close(conn)
\ No newline at end of file
+ cinter.close(conn)
diff --git a/src/connector/python/linux/python2/taos/cursor.py b/src/connector/python/linux/python2/taos/cursor.py
index bc6670ca77..eee5e39488 100644
--- a/src/connector/python/linux/python2/taos/cursor.py
+++ b/src/connector/python/linux/python2/taos/cursor.py
@@ -49,7 +49,7 @@ class TDengineCursor(object):
raise OperationalError("Invalid use of fetch iterator")
if self._block_rows <= self._block_iter:
- block, self._block_rows = CTaosInterface.fetchBlock(
+ block, self._block_rows = CTaosInterface.fetchRow(
self._result, self._fields)
if self._block_rows == 0:
raise StopIteration
@@ -190,6 +190,23 @@ class TDengineCursor(object):
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
+ buffer = [[] for i in range(len(self._fields))]
+ self._rowcount = 0
+ while True:
+ block, num_of_fields = CTaosInterface.fetchRow(self._result, self._fields)
+ errno = CTaosInterface.libtaos.taos_errno(self._result)
+ if errno != 0:
+ raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
+ if num_of_fields == 0:
+ break
+ self._rowcount += num_of_fields
+ for i in range(len(self._fields)):
+ buffer[i].extend(block[i])
+ return list(map(tuple, zip(*buffer)))
+ def fetchall_block(self):
+ if self._result is None or self._fields is None:
+ raise OperationalError("Invalid use of fetchall")
+
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
@@ -203,7 +220,6 @@ class TDengineCursor(object):
for i in range(len(self._fields)):
buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer)))
-
def nextset(self):
"""
"""
diff --git a/src/connector/python/linux/python3/setup.py b/src/connector/python/linux/python3/setup.py
index 03a49fc1c5..f49ebe2b6d 100644
--- a/src/connector/python/linux/python3/setup.py
+++ b/src/connector/python/linux/python3/setup.py
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
setuptools.setup(
name="taos",
- version="2.0.0",
+ version="2.0.2",
author="Taosdata Inc.",
author_email="support@taosdata.com",
description="TDengine python client package",
diff --git a/src/connector/python/linux/python3/taos/cinterface.py b/src/connector/python/linux/python3/taos/cinterface.py
index be5b99d8c1..bbf5a0c714 100644
--- a/src/connector/python/linux/python3/taos/cinterface.py
+++ b/src/connector/python/linux/python3/taos/cinterface.py
@@ -18,7 +18,7 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
_timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0:
- return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)][::1]))
+ return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
else:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
@@ -26,7 +26,7 @@ def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)] ]
@@ -34,7 +34,7 @@ def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
@@ -42,7 +42,7 @@ def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)][::1]]
+ return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
else:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)] ]
@@ -50,7 +50,7 @@ def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
@@ -58,7 +58,7 @@ def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ]
@@ -66,7 +66,7 @@ def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row
"""
if num_of_rows > 0:
- return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)][::1] ]
+ return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
@@ -74,7 +74,7 @@ def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row
"""
if num_of_rows > 0:
- return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)][::1] ]
+ return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
@@ -82,7 +82,7 @@ def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
if num_of_rows > 0:
- return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)][::1]]
+ return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
@@ -90,9 +90,7 @@ def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row
"""
assert(nbytes is not None)
-
- res = []
-
+ res=[]
for i in range(abs(num_of_rows)):
try:
if num_of_rows >= 0:
@@ -103,17 +101,49 @@ def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
except ValueError:
res.append(None)
+ return res
+
+def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
+ """Function to convert C binary row to python row
+ """
+ res=[]
+ if num_of_rows > 0:
+ for i in range(abs(num_of_rows)):
+ try:
+ rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode()[0:rbyte] )
+ except ValueError:
+ res.append(None)
+ else:
+ for i in range(abs(num_of_rows)):
+ try:
+ rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode()[0:rbyte] )
+ except ValueError:
+ res.append(None)
+ return res
+
+def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
+ """Function to convert C nchar row to python row
+ """
+ assert(nbytes is not None)
+ res=[]
+ if num_of_rows >= 0:
+ for i in range(abs(num_of_rows)):
+ try:
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode() )
+ except ValueError:
+ res.append(None)
+ else:
+ for i in range(abs(num_of_rows)):
+ try:
+ res.append( (ctypes.cast(data+nbytes*i+2, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
+ except ValueError:
+ res.append(None)
return res
- # if num_of_rows > 0:
- # for i in range(abs(num_of_rows)):
- # try:
- # res.append( (ctypes.cast(data+nbytes*i, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
- # except ValueError:
- # res.append(None)
- # return res
- # # return [ele.value for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[:abs(num_of_rows)][::1]]
- # else:
- # return [ele.value for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[:abs(num_of_rows)]]
_CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python,
@@ -128,6 +158,19 @@ _CONVERT_FUNC = {
FieldType.C_NCHAR : _crow_nchar_to_python
}
+_CONVERT_FUNC_BLOCK = {
+ FieldType.C_BOOL: _crow_bool_to_python,
+ FieldType.C_TINYINT : _crow_tinyint_to_python,
+ FieldType.C_SMALLINT : _crow_smallint_to_python,
+ FieldType.C_INT : _crow_int_to_python,
+ FieldType.C_BIGINT : _crow_bigint_to_python,
+ FieldType.C_FLOAT : _crow_float_to_python,
+ FieldType.C_DOUBLE : _crow_double_to_python,
+ FieldType.C_BINARY: _crow_binary_to_python_block,
+ FieldType.C_TIMESTAMP : _crow_timestamp_to_python,
+ FieldType.C_NCHAR : _crow_nchar_to_python_block
+}
+
# Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65),
@@ -253,7 +296,7 @@ class CTaosInterface(object):
raise AttributeError("sql is expected as a string")
# finally:
# CTaosInterface.libtaos.close(connection)
-
+
@staticmethod
def affectedRows(result):
"""The affected rows after runing query
@@ -308,29 +351,26 @@ class CTaosInterface(object):
return fields
- # @staticmethod
- # def fetchBlock(result, fields):
- # pblock = ctypes.c_void_p(0)
- # num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
- # result, ctypes.byref(pblock))
- # if num_of_rows == 0:
- # return None, 0
-
- # isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO)
- # blocks = [None] * len(fields)
- # fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
- # fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
- # for i in range(len(fields)):
- # data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
-
- # if fields[i]['type'] not in _CONVERT_FUNC:
- # raise DatabaseError("Invalid data type returned from database")
- # print('====================',fieldLen[i])
- # blocks[i] = _CONVERT_FUNC[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro)
-
- # return blocks, abs(num_of_rows)
@staticmethod
def fetchBlock(result, fields):
+ pblock = ctypes.c_void_p(0)
+ num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
+ result, ctypes.byref(pblock))
+ if num_of_rows == 0:
+ return None, 0
+ isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO)
+ blocks = [None] * len(fields)
+ fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
+ fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
+ for i in range(len(fields)):
+ data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
+ if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
+ raise DatabaseError("Invalid data type returned from database")
+ blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro)
+
+ return blocks, abs(num_of_rows)
+ @staticmethod
+ def fetchRow(result, fields):
pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock :
@@ -350,6 +390,7 @@ class CTaosInterface(object):
else:
return None, 0
return blocks, abs(num_of_rows)
+
@staticmethod
def freeResult(result):
CTaosInterface.libtaos.taos_free_result(result)
diff --git a/src/connector/python/linux/python3/taos/cursor.py b/src/connector/python/linux/python3/taos/cursor.py
index eb10bed485..d8184668c8 100644
--- a/src/connector/python/linux/python3/taos/cursor.py
+++ b/src/connector/python/linux/python3/taos/cursor.py
@@ -5,7 +5,6 @@ import threading
# querySeqNum = 0
-
class TDengineCursor(object):
"""Database cursor which is used to manage the context of a fetch operation.
@@ -51,7 +50,7 @@ class TDengineCursor(object):
raise OperationalError("Invalid use of fetch iterator")
if self._block_rows <= self._block_iter:
- block, self._block_rows = CTaosInterface.fetchBlock(
+ block, self._block_rows = CTaosInterface.fetchRow(
self._result, self._fields)
if self._block_rows == 0:
raise StopIteration
@@ -196,18 +195,13 @@ class TDengineCursor(object):
def fetchall(self):
"""Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation.
"""
- # if threading.get_ident() != self._threadId:
- # info ="[WARNING] Cursor fetchall:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
- # raise OperationalError(info)
- # print(info)
- # return None
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
- block, num_of_fields = CTaosInterface.fetchBlock(self._result, self._fields)
+ block, num_of_fields = CTaosInterface.fetchRow(self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0:
raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
@@ -218,6 +212,22 @@ class TDengineCursor(object):
buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer)))
+ def fetchall_block(self):
+ if self._result is None or self._fields is None:
+ raise OperationalError("Invalid use of fetchall")
+
+ buffer = [[] for i in range(len(self._fields))]
+ self._rowcount = 0
+ while True:
+ block, num_of_fields = CTaosInterface.fetchBlock(self._result, self._fields)
+ errno = CTaosInterface.libtaos.taos_errno(self._result)
+ if errno != 0:
+ raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
+ if num_of_fields == 0: break
+ self._rowcount += num_of_fields
+ for i in range(len(self._fields)):
+ buffer[i].extend(block[i])
+ return list(map(tuple, zip(*buffer)))
def nextset(self):
"""
"""
diff --git a/src/connector/python/windows/python2/setup.py b/src/connector/python/windows/python2/setup.py
index fd82a55650..34cb7a04d7 100644
--- a/src/connector/python/windows/python2/setup.py
+++ b/src/connector/python/windows/python2/setup.py
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
setuptools.setup(
name="taos",
- version="2.0.0",
+ version="2.0.2",
author="Taosdata Inc.",
author_email="support@taosdata.com",
description="TDengine python client package",
diff --git a/src/connector/python/windows/python2/taos/cinterface.py b/src/connector/python/windows/python2/taos/cinterface.py
index 084d38e41c..d4bf1a0350 100644
--- a/src/connector/python/windows/python2/taos/cinterface.py
+++ b/src/connector/python/windows/python2/taos/cinterface.py
@@ -18,7 +18,7 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
_timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0:
- return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)][::1]))
+ return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)]))
else:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)]))
@@ -26,7 +26,7 @@ def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)] ]
@@ -34,7 +34,7 @@ def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
@@ -42,7 +42,7 @@ def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)][::1]]
+ return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
else:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)] ]
@@ -50,7 +50,7 @@ def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
@@ -58,7 +58,7 @@ def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)] ]
@@ -66,7 +66,7 @@ def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row
"""
if num_of_rows > 0:
- return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)][::1] ]
+ return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
@@ -74,7 +74,7 @@ def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row
"""
if num_of_rows > 0:
- return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)][::1] ]
+ return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
@@ -82,7 +82,7 @@ def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
if num_of_rows > 0:
- return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)][::1]]
+ return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
@@ -90,9 +90,7 @@ def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row
"""
assert(nbytes is not None)
-
- res = []
-
+ res=[]
for i in range(abs(num_of_rows)):
try:
if num_of_rows >= 0:
@@ -103,17 +101,49 @@ def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
except ValueError:
res.append(None)
+ return res
+
+def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
+ """Function to convert C binary row to python row
+ """
+ res=[]
+ if num_of_rows > 0:
+ for i in range(abs(num_of_rows)):
+ try:
+ rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode()[0:rbyte] )
+ except ValueError:
+ res.append(None)
+ else:
+ for i in range(abs(num_of_rows)):
+ try:
+ rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode()[0:rbyte] )
+ except ValueError:
+ res.append(None)
+ return res
+
+def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
+ """Function to convert C nchar row to python row
+ """
+ assert(nbytes is not None)
+ res=[]
+ if num_of_rows >= 0:
+ for i in range(abs(num_of_rows)):
+ try:
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode() )
+ except ValueError:
+ res.append(None)
+ else:
+ for i in range(abs(num_of_rows)):
+ try:
+ res.append( (ctypes.cast(data+nbytes*i+2, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
+ except ValueError:
+ res.append(None)
return res
- # if num_of_rows > 0:
- # for i in range(abs(num_of_rows)):
- # try:
- # res.append( (ctypes.cast(data+nbytes*i, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
- # except ValueError:
- # res.append(None)
- # return res
- # # return [ele.value for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[:abs(num_of_rows)][::-1]]
- # else:
- # return [ele.value for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[:abs(num_of_rows)]]
_CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python,
@@ -128,6 +158,19 @@ _CONVERT_FUNC = {
FieldType.C_NCHAR : _crow_nchar_to_python
}
+_CONVERT_FUNC_BLOCK = {
+ FieldType.C_BOOL: _crow_bool_to_python,
+ FieldType.C_TINYINT : _crow_tinyint_to_python,
+ FieldType.C_SMALLINT : _crow_smallint_to_python,
+ FieldType.C_INT : _crow_int_to_python,
+ FieldType.C_BIGINT : _crow_bigint_to_python,
+ FieldType.C_FLOAT : _crow_float_to_python,
+ FieldType.C_DOUBLE : _crow_double_to_python,
+ FieldType.C_BINARY: _crow_binary_to_python_block,
+ FieldType.C_TIMESTAMP : _crow_timestamp_to_python,
+ FieldType.C_NCHAR : _crow_nchar_to_python_block
+}
+
# Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65),
@@ -225,9 +268,10 @@ class CTaosInterface(object):
if connection.value == None:
print('connect to TDengine failed')
+ raise ConnectionError("connect to TDengine failed")
# sys.exit(1)
- else:
- print('connect to TDengine success')
+ #else:
+ # print('connect to TDengine success')
return connection
@@ -236,7 +280,7 @@ class CTaosInterface(object):
'''Close the TDengine handle
'''
CTaosInterface.libtaos.taos_close(connection)
- print('connection is closed')
+ #print('connection is closed')
@staticmethod
def query(connection, sql):
@@ -252,7 +296,7 @@ class CTaosInterface(object):
raise AttributeError("sql is expected as a string")
# finally:
# CTaosInterface.libtaos.close(connection)
-
+
@staticmethod
def affectedRows(result):
"""The affected rows after runing query
@@ -309,6 +353,24 @@ class CTaosInterface(object):
@staticmethod
def fetchBlock(result, fields):
+ pblock = ctypes.c_void_p(0)
+ num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
+ result, ctypes.byref(pblock))
+ if num_of_rows == 0:
+ return None, 0
+ isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO)
+ blocks = [None] * len(fields)
+ fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
+ fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
+ for i in range(len(fields)):
+ data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
+ if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
+ raise DatabaseError("Invalid data type returned from database")
+ blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro)
+
+ return blocks, abs(num_of_rows)
+ @staticmethod
+ def fetchRow(result, fields):
pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock :
@@ -393,7 +455,7 @@ class CTaosInterface(object):
def errStr(result):
"""Return the error styring
"""
- return CTaosInterface.libtaos.taos_errstr(result)
+ return CTaosInterface.libtaos.taos_errstr(result).decode('utf-8')
if __name__ == '__main__':
@@ -410,4 +472,4 @@ if __name__ == '__main__':
print(data)
cinter.freeResult(result)
- cinter.close(conn)
\ No newline at end of file
+ cinter.close(conn)
diff --git a/src/connector/python/windows/python2/taos/cursor.py b/src/connector/python/windows/python2/taos/cursor.py
index 35846cbe11..71651afee1 100644
--- a/src/connector/python/windows/python2/taos/cursor.py
+++ b/src/connector/python/windows/python2/taos/cursor.py
@@ -50,7 +50,7 @@ class TDengineCursor(object):
raise OperationalError("Invalid use of fetch iterator")
if self._block_rows <= self._block_iter:
- block, self._block_rows = CTaosInterface.fetchBlock(self._result, self._fields)
+ block, self._block_rows = CTaosInterface.fetchRow(self._result, self._fields)
if self._block_rows == 0:
raise StopIteration
self._block = list(map(tuple, zip(*block)))
@@ -143,7 +143,25 @@ class TDengineCursor(object):
"""
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
+
+ buffer = [[] for i in range(len(self._fields))]
+ self._rowcount = 0
+ while True:
+ block, num_of_fields = CTaosInterface.fetchRow(self._result, self._fields)
+ errno = CTaosInterface.libtaos.taos_errno(self._result)
+ if errno != 0:
+ raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
+ if num_of_fields == 0:
+ break
+ self._rowcount += num_of_fields
+ for i in range(len(self._fields)):
+ buffer[i].extend(block[i])
+ return list(map(tuple, zip(*buffer)))
+ def fetchall_block(self):
+ if self._result is None or self._fields is None:
+ raise OperationalError("Invalid use of fetchall")
+
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
@@ -178,7 +196,7 @@ class TDengineCursor(object):
self._description = None
self._rowcount = -1
if self._result is not None:
- CTaosInterface.freeResult(self._result)
+ CTaosInterface.freeResult(self._result)
self._result = None
self._fields = None
self._block = None
diff --git a/src/connector/python/windows/python3/setup.py b/src/connector/python/windows/python3/setup.py
index 9abdace5a9..c09644d330 100644
--- a/src/connector/python/windows/python3/setup.py
+++ b/src/connector/python/windows/python3/setup.py
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
setuptools.setup(
name="taos",
- version="2.0.0",
+ version="2.0.2",
author="Taosdata Inc.",
author_email="support@taosdata.com",
description="TDengine python client package",
diff --git a/src/connector/python/windows/python3/taos/cinterface.py b/src/connector/python/windows/python3/taos/cinterface.py
index 68adf191c9..fb1b65fc15 100644
--- a/src/connector/python/windows/python3/taos/cinterface.py
+++ b/src/connector/python/windows/python3/taos/cinterface.py
@@ -18,7 +18,7 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
_timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0:
- return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)][::1]))
+ return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)]))
else:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)]))
@@ -26,7 +26,7 @@ def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)] ]
@@ -34,7 +34,7 @@ def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
@@ -42,7 +42,7 @@ def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)][::1]]
+ return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
else:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)] ]
@@ -50,7 +50,7 @@ def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
@@ -58,7 +58,7 @@ def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
- return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)][::1] ]
+ return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)] ]
else:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_longlong))[:abs(num_of_rows)] ]
@@ -66,7 +66,7 @@ def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row
"""
if num_of_rows > 0:
- return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)][::1] ]
+ return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
@@ -74,7 +74,7 @@ def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row
"""
if num_of_rows > 0:
- return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)][::1] ]
+ return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
@@ -82,7 +82,7 @@ def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
if num_of_rows > 0:
- return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)][::1]]
+ return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
@@ -104,16 +104,48 @@ def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
res.append(None)
return res
- # if num_of_rows > 0:
- # for i in range(abs(num_of_rows)):
- # try:
- # res.append( (ctypes.cast(data+nbytes*i, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
- # except ValueError:
- # res.append(None)
- # return res
- # # return [ele.value for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[:abs(num_of_rows)][::-1]]
- # else:
- # return [ele.value for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[:abs(num_of_rows)]]
+
+def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
+ """Function to convert C binary row to python row
+ """
+ res=[]
+ if num_of_rows > 0:
+ for i in range(abs(num_of_rows)):
+ try:
+ rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode()[0:rbyte] )
+ except ValueError:
+ res.append(None)
+ else:
+ for i in range(abs(num_of_rows)):
+ try:
+ rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode()[0:rbyte] )
+ except ValueError:
+ res.append(None)
+ return res
+
+def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
+ """Function to convert C nchar row to python row
+ """
+ assert(nbytes is not None)
+ res=[]
+ if num_of_rows >= 0:
+ for i in range(abs(num_of_rows)):
+ try:
+ tmpstr = ctypes.c_char_p(data+nbytes*i+2)
+ res.append( tmpstr.value.decode() )
+ except ValueError:
+ res.append(None)
+ else:
+ for i in range(abs(num_of_rows)):
+ try:
+ res.append( (ctypes.cast(data+nbytes*i+2, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
+ except ValueError:
+ res.append(None)
+ return res
_CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python,
@@ -128,6 +160,19 @@ _CONVERT_FUNC = {
FieldType.C_NCHAR : _crow_nchar_to_python
}
+_CONVERT_FUNC_BLOCK = {
+ FieldType.C_BOOL: _crow_bool_to_python,
+ FieldType.C_TINYINT : _crow_tinyint_to_python,
+ FieldType.C_SMALLINT : _crow_smallint_to_python,
+ FieldType.C_INT : _crow_int_to_python,
+ FieldType.C_BIGINT : _crow_bigint_to_python,
+ FieldType.C_FLOAT : _crow_float_to_python,
+ FieldType.C_DOUBLE : _crow_double_to_python,
+ FieldType.C_BINARY: _crow_binary_to_python_block,
+ FieldType.C_TIMESTAMP : _crow_timestamp_to_python,
+ FieldType.C_NCHAR : _crow_nchar_to_python_block
+}
+
# Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65),
@@ -227,8 +272,8 @@ class CTaosInterface(object):
print('connect to TDengine failed')
raise ConnectionError("connect to TDengine failed")
# sys.exit(1)
- else:
- print('connect to TDengine success')
+ #else:
+ # print('connect to TDengine success')
return connection
@@ -237,7 +282,7 @@ class CTaosInterface(object):
'''Close the TDengine handle
'''
CTaosInterface.libtaos.taos_close(connection)
- print('connection is closed')
+ #print('connection is closed')
@staticmethod
def query(connection, sql):
@@ -310,6 +355,24 @@ class CTaosInterface(object):
@staticmethod
def fetchBlock(result, fields):
+ pblock = ctypes.c_void_p(0)
+ num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
+ result, ctypes.byref(pblock))
+ if num_of_rows == 0:
+ return None, 0
+ isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO)
+ blocks = [None] * len(fields)
+ fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
+ fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
+ for i in range(len(fields)):
+ data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
+ if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
+ raise DatabaseError("Invalid data type returned from database")
+ blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro)
+
+ return blocks, abs(num_of_rows)
+ @staticmethod
+ def fetchRow(result, fields):
pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock :
diff --git a/src/connector/python/windows/python3/taos/cursor.py b/src/connector/python/windows/python3/taos/cursor.py
index b58b494cad..b813bba357 100644
--- a/src/connector/python/windows/python3/taos/cursor.py
+++ b/src/connector/python/windows/python3/taos/cursor.py
@@ -51,7 +51,7 @@ class TDengineCursor(object):
raise OperationalError("Invalid use of fetch iterator")
if self._block_rows <= self._block_iter:
- block, self._block_rows = CTaosInterface.fetchBlock(self._result, self._fields)
+ block, self._block_rows = CTaosInterface.fetchRow(self._result, self._fields)
if self._block_rows == 0:
raise StopIteration
self._block = list(map(tuple, zip(*block)))
@@ -144,7 +144,25 @@ class TDengineCursor(object):
"""
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
-
+
+ buffer = [[] for i in range(len(self._fields))]
+ self._rowcount = 0
+ while True:
+ block, num_of_fields = CTaosInterface.fetchRow(self._result, self._fields)
+ errno = CTaosInterface.libtaos.taos_errno(self._result)
+ if errno != 0:
+ raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
+ if num_of_fields == 0:
+ break
+ self._rowcount += num_of_fields
+ for i in range(len(self._fields)):
+ buffer[i].extend(block[i])
+ return list(map(tuple, zip(*buffer)))
+
+ def fetchall_block(self):
+ if self._result is None or self._fields is None:
+ raise OperationalError("Invalid use of fetchall")
+
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
diff --git a/src/inc/taoserror.h b/src/inc/taoserror.h
index 3db2fd6362..3486911cfa 100644
--- a/src/inc/taoserror.h
+++ b/src/inc/taoserror.h
@@ -259,6 +259,7 @@ TAOS_DEFINE_ERROR(TSDB_CODE_GRANT_CPU_LIMITED, 0, 0x080B, "CPU cores
TAOS_DEFINE_ERROR(TSDB_CODE_SYN_INVALID_CONFIG, 0, 0x0900, "Invalid Sync Configuration")
TAOS_DEFINE_ERROR(TSDB_CODE_SYN_NOT_ENABLED, 0, 0x0901, "Sync module not enabled")
TAOS_DEFINE_ERROR(TSDB_CODE_SYN_INVALID_VERSION, 0, 0x0902, "Invalid Sync version")
+TAOS_DEFINE_ERROR(TSDB_CODE_SYN_CONFIRM_EXPIRED, 0, 0x0903, "Sync confirm expired")
// wal
TAOS_DEFINE_ERROR(TSDB_CODE_WAL_APP_ERROR, 0, 0x1000, "Unexpected generic error in wal")
@@ -365,7 +366,6 @@ TAOS_DEFINE_ERROR(TSDB_CODE_HTTP_OP_TAG_VALUE_TOO_LONG, 0, 0x11A4, "tag value
TAOS_DEFINE_ERROR(TSDB_CODE_HTTP_OP_VALUE_NULL, 0, 0x11A5, "value not find")
TAOS_DEFINE_ERROR(TSDB_CODE_HTTP_OP_VALUE_TYPE, 0, 0x11A6, "value type should be boolean, number or string")
-
TAOS_DEFINE_ERROR(TSDB_CODE_ODBC_OOM, 0, 0x2100, "out of memory")
TAOS_DEFINE_ERROR(TSDB_CODE_ODBC_CONV_CHAR_NOT_NUM, 0, 0x2101, "convertion not a valid literal input")
TAOS_DEFINE_ERROR(TSDB_CODE_ODBC_CONV_UNDEF, 0, 0x2102, "convertion undefined")
diff --git a/src/kit/taosdump/taosdump.c b/src/kit/taosdump/taosdump.c
index 88f07ee602..a7258c9724 100644
--- a/src/kit/taosdump/taosdump.c
+++ b/src/kit/taosdump/taosdump.c
@@ -2146,7 +2146,7 @@ int taosDumpInOneFile(TAOS * taos, FILE* fp, char* fcharset, char* encode, c
char * line = NULL;
size_t line_len = 0;
- cmd = (char *)malloc(COMMAND_SIZE);
+ cmd = (char *)malloc(TSDB_MAX_ALLOWED_SQL_LEN);
if (cmd == NULL) {
fprintf(stderr, "failed to allocate memory\n");
return -1;
@@ -2155,7 +2155,7 @@ int taosDumpInOneFile(TAOS * taos, FILE* fp, char* fcharset, char* encode, c
int lineNo = 0;
while ((read_len = getline(&line, &line_len, fp)) != -1) {
++lineNo;
- if (read_len >= COMMAND_SIZE) continue;
+ if (read_len >= TSDB_MAX_ALLOWED_SQL_LEN) continue;
line[--read_len] = '\0';
//if (read_len == 0 || isCommentLine(line)) { // line starts with #
@@ -2176,7 +2176,7 @@ int taosDumpInOneFile(TAOS * taos, FILE* fp, char* fcharset, char* encode, c
fprintf(stderr, "error sql: linenu:%d, file:%s\n", lineNo, fileName);
}
- memset(cmd, 0, COMMAND_SIZE);
+ memset(cmd, 0, TSDB_MAX_ALLOWED_SQL_LEN);
cmd_len = 0;
}
diff --git a/src/mnode/src/mnodeSdb.c b/src/mnode/src/mnodeSdb.c
index 36e1a1eb2b..e5fda26687 100644
--- a/src/mnode/src/mnodeSdb.c
+++ b/src/mnode/src/mnodeSdb.c
@@ -107,7 +107,7 @@ static taos_queue tsSdbWQueue;
static SSdbWorkerPool tsSdbPool;
static int32_t sdbProcessWrite(void *pRow, void *pHead, int32_t qtype, void *unused);
-static int32_t sdbWriteWalToQueue(int32_t vgId, void *pHead, int32_t qtype, void *rparam);
+static int32_t sdbWriteFwdToQueue(int32_t vgId, void *pHead, int32_t qtype, void *rparam);
static int32_t sdbWriteRowToQueue(SSdbRow *pRow, int32_t action);
static void sdbFreeFromQueue(SSdbRow *pRow);
static void * sdbWorkerFp(void *pWorker);
@@ -372,7 +372,7 @@ void sdbUpdateSync(void *pMnodes) {
sprintf(syncInfo.path, "%s", tsMnodeDir);
syncInfo.getWalInfo = sdbGetWalInfo;
syncInfo.getFileInfo = sdbGetFileInfo;
- syncInfo.writeToCache = sdbWriteWalToQueue;
+ syncInfo.writeToCache = sdbWriteFwdToQueue;
syncInfo.confirmForward = sdbConfirmForward;
syncInfo.notifyRole = sdbNotifyRole;
tsSdbMgmt.cfg = syncCfg;
@@ -559,7 +559,36 @@ static int32_t sdbUpdateHash(SSdbTable *pTable, SSdbRow *pRow) {
return TSDB_CODE_SUCCESS;
}
-static int sdbProcessWrite(void *wparam, void *hparam, int32_t qtype, void *unused) {
+static int32_t sdbPerformInsertAction(SWalHead *pHead, SSdbTable *pTable) {
+ SSdbRow row = {.rowSize = pHead->len, .rowData = pHead->cont, .pTable = pTable};
+ (*pTable->fpDecode)(&row);
+ return sdbInsertHash(pTable, &row);
+}
+
+static int32_t sdbPerformDeleteAction(SWalHead *pHead, SSdbTable *pTable) {
+ void *pObj = sdbGetRowMeta(pTable, pHead->cont);
+ if (pObj == NULL) {
+ sdbDebug("vgId:1, sdb:%s, object:%s not exist in hash, ignore delete action", pTable->name,
+ sdbGetKeyStr(pTable, pHead->cont));
+ return TSDB_CODE_SUCCESS;
+ }
+ SSdbRow row = {.pTable = pTable, .pObj = pObj};
+ return sdbDeleteHash(pTable, &row);
+}
+
+static int32_t sdbPerformUpdateAction(SWalHead *pHead, SSdbTable *pTable) {
+ void *pObj = sdbGetRowMeta(pTable, pHead->cont);
+ if (pObj == NULL) {
+ sdbDebug("vgId:1, sdb:%s, object:%s not exist in hash, ignore update action", pTable->name,
+ sdbGetKeyStr(pTable, pHead->cont));
+ return TSDB_CODE_SUCCESS;
+ }
+ SSdbRow row = {.rowSize = pHead->len, .rowData = pHead->cont, .pTable = pTable};
+ (*pTable->fpDecode)(&row);
+ return sdbUpdateHash(pTable, &row);
+}
+
+static int32_t sdbProcessWrite(void *wparam, void *hparam, int32_t qtype, void *unused) {
SSdbRow *pRow = wparam;
SWalHead *pHead = hparam;
int32_t tableId = pHead->msgType / 10;
@@ -568,6 +597,8 @@ static int sdbProcessWrite(void *wparam, void *hparam, int32_t qtype, void *unus
SSdbTable *pTable = sdbGetTableFromId(tableId);
assert(pTable != NULL);
+ if (qtype == TAOS_QTYPE_QUERY) return sdbPerformDeleteAction(pHead, pTable);
+
pthread_mutex_lock(&tsSdbMgmt.mutex);
if (pHead->version == 0) {
@@ -627,28 +658,17 @@ static int sdbProcessWrite(void *wparam, void *hparam, int32_t qtype, void *unus
// from wal or forward msg, row not created, should add into hash
if (action == SDB_ACTION_INSERT) {
- SSdbRow row = {.rowSize = pHead->len, .rowData = pHead->cont, .pTable = pTable};
- code = (*pTable->fpDecode)(&row);
- return sdbInsertHash(pTable, &row);
+ return sdbPerformInsertAction(pHead, pTable);
} else if (action == SDB_ACTION_DELETE) {
- void *pObj = sdbGetRowMeta(pTable, pHead->cont);
- if (pObj == NULL) {
- sdbDebug("vgId:1, sdb:%s, object:%s not exist in hash, ignore delete action", pTable->name,
- sdbGetKeyStr(pTable, pHead->cont));
+ if (qtype == TAOS_QTYPE_FWD) {
+ // Dropping a database/stable may take a long time and cause a timeout, so we confirm first and then re-put it into the queue
+ sdbWriteFwdToQueue(1, hparam, TAOS_QTYPE_QUERY, unused);
return TSDB_CODE_SUCCESS;
+ } else {
+ return sdbPerformDeleteAction(pHead, pTable);
}
- SSdbRow row = {.pTable = pTable, .pObj = pObj};
- return sdbDeleteHash(pTable, &row);
} else if (action == SDB_ACTION_UPDATE) {
- void *pObj = sdbGetRowMeta(pTable, pHead->cont);
- if (pObj == NULL) {
- sdbDebug("vgId:1, sdb:%s, object:%s not exist in hash, ignore update action", pTable->name,
- sdbGetKeyStr(pTable, pHead->cont));
- return TSDB_CODE_SUCCESS;
- }
- SSdbRow row = {.rowSize = pHead->len, .rowData = pHead->cont, .pTable = pTable};
- code = (*pTable->fpDecode)(&row);
- return sdbUpdateHash(pTable, &row);
+ return sdbPerformUpdateAction(pHead, pTable);
} else {
return TSDB_CODE_MND_INVALID_MSG_TYPE;
}
@@ -966,7 +986,7 @@ static void sdbFreeFromQueue(SSdbRow *pRow) {
taosFreeQitem(pRow);
}
-static int32_t sdbWriteWalToQueue(int32_t vgId, void *wparam, int32_t qtype, void *rparam) {
+static int32_t sdbWriteFwdToQueue(int32_t vgId, void *wparam, int32_t qtype, void *rparam) {
SWalHead *pHead = wparam;
int32_t size = sizeof(SSdbRow) + sizeof(SWalHead) + pHead->len;
diff --git a/src/sync/src/syncMain.c b/src/sync/src/syncMain.c
index bc3803b732..d2d6d2d7fa 100644
--- a/src/sync/src/syncMain.c
+++ b/src/sync/src/syncMain.c
@@ -1279,7 +1279,7 @@ static void syncMonitorFwdInfos(void *param, void *tmrId) {
sDebug("vgId:%d, forward info expired, hver:%" PRIu64 " curtime:%" PRIu64 " savetime:%" PRIu64, pNode->vgId,
pFwdInfo->version, time, pFwdInfo->time);
- syncProcessFwdAck(pNode, pFwdInfo, TSDB_CODE_RPC_NETWORK_UNAVAIL);
+ syncProcessFwdAck(pNode, pFwdInfo, TSDB_CODE_SYN_CONFIRM_EXPIRED);
}
syncRemoveConfirmedFwdInfo(pNode);
diff --git a/tests/pytest/fulltest.sh b/tests/pytest/fulltest.sh
index 525fbad6c1..0e3b482e3d 100755
--- a/tests/pytest/fulltest.sh
+++ b/tests/pytest/fulltest.sh
@@ -158,6 +158,7 @@ python3 ./test.py -f query/bug1471.py
python3 ./test.py -f query/bug1874.py
python3 ./test.py -f query/bug1875.py
python3 ./test.py -f query/bug1876.py
+python3 ./test.py -f query/bug2218.py
#stream
python3 ./test.py -f stream/metric_1.py
@@ -205,7 +206,9 @@ python3 test.py -f query/queryInterval.py
python3 test.py -f query/queryFillTest.py
# tools
-python3 test.py -f tools/taosdemo.py
+python3 test.py -f tools/taosdemoTest.py
+python3 test.py -f tools/taosdumpTest.py
+python3 test.py -f tools/lowaTest.py
# subscribe
python3 test.py -f subscribe/singlemeter.py
diff --git a/tests/pytest/insert/restfulInsert.py b/tests/pytest/insert/restfulInsert.py
index 9fa1f33a24..c39d1cbe48 100644
--- a/tests/pytest/insert/restfulInsert.py
+++ b/tests/pytest/insert/restfulInsert.py
@@ -55,9 +55,12 @@ class RestfulInsert:
def insertUnlimitedData(self, threadID):
print("thread %d started" % threadID)
tablesPerThread = int (self.numOfTables / self.numOfThreads)
+
+ count = 0
while True:
i = 0
- start = self.ts
+ start = self.ts + count * self.batchSize
+ count = count + 1
for i in range(tablesPerThread):
tableID = i + threadID * tablesPerThread
@@ -65,7 +68,7 @@ class RestfulInsert:
data = "insert into %s.%s%d values" % (self.dbname, self.tableNamePerfix, tableID)
values = []
for k in range(self.batchSize):
- values.append("(%d, %d, %d, %d)" % (start + j * self.batchSize + k, random.randint(1, 100), random.randint(1, 100), random.randint(1, 100)))
+ values.append("(%d, %d, %d, %d)" % (start + k, random.randint(1, 100), random.randint(1, 100), random.randint(1, 100)))
if(self.outOfOrder == False):
for k in range(len(values)):
diff --git a/tests/pytest/query/bug2218.py b/tests/pytest/query/bug2218.py
new file mode 100644
index 0000000000..bb92e5d9ce
--- /dev/null
+++ b/tests/pytest/query/bug2218.py
@@ -0,0 +1,54 @@
+###################################################################
+# Copyright (c) 2016 by TAOS Technologies, Inc.
+# All rights reserved.
+#
+# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted,
+# disclosed or used in any form or by any means other than as
+# expressly provided by the written permission from Jianhui Tao
+#
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+class TDTestCase:
+ def init(self, conn, logSql):
+ tdLog.debug("start to execute %s" % __file__)
+ tdSql.init(conn.cursor(), logSql)
+
+ def run(self):
+ tdSql.prepare()
+ print("==========step1")
+ print("create table && insert data")
+
+ tdSql.execute("create table mt0 (ts timestamp, c1 int, c2 float, c3 bigint, c4 smallint, c5 tinyint, c6 double, c7 bool)")
+ insertRows = 1000
+ t0 = 1604298064000
+ tdLog.info("insert %d rows" % (insertRows))
+ for i in range(insertRows):
+ ret = tdSql.execute(
+ "insert into mt0 values (%d , %d,%d,%d,%d,%d,%d,%d)" %
+ (t0+i,i%100,i/2,i%100,i%100,i%100,i*1.0,i%2))
+ print("==========step2")
+ print("test col*1*1 desc ")
+ tdSql.query('select c1,c1*1*1,c2*1*1,c3*1*1,c4*1*1,c5*1*1,c6*1*1 from mt0 order by ts desc limit 2')
+ tdSql.checkData(0,0,99)
+ tdSql.checkData(0,1,0.0)
+ tdSql.checkData(0,2,0.0)
+ tdSql.checkData(0,3,0.0)
+ tdSql.checkData(0,4,0.0)
+ tdSql.checkData(0,5,0.0)
+ tdSql.checkData(0,6,0.0)
+
+
+ def stop(self):
+ tdSql.close()
+ tdLog.success("%s successfully executed" % __file__)
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
\ No newline at end of file
diff --git a/tests/pytest/query/filterOtherTypes.py b/tests/pytest/query/filterOtherTypes.py
index 85d5a67bef..f80552138d 100644
--- a/tests/pytest/query/filterOtherTypes.py
+++ b/tests/pytest/query/filterOtherTypes.py
@@ -376,11 +376,9 @@ class TDTestCase:
tdSql.execute("insert into t1 values(1538548685000, 1) (1538548685001, 2) (1538548685002, 3)")
tdSql.execute("insert into t2 values(1538548685000, 4) (1538548685001, 5) (1538548685002, 6)")
- tdSql.query("select * from t1 where tag1 like '%g'")
- tdSql.checkRows(3)
+ tdSql.error("select * from t1 where tag1 like '%g'")
- tdSql.query("select * from t2 where tag1 like '%g'")
- tdSql.checkRows(3)
+ tdSql.error("select * from t2 where tag1 like '%g'")
tdSql.query("select * from meters where tag1 like '%g'")
tdSql.checkRows(6)
@@ -396,20 +394,16 @@ class TDTestCase:
tdSql.execute("insert into t5 values(1538548685000, 1) (1538548685001, 2) (1538548685002, 3)")
tdSql.execute("insert into t6 values(1538548685000, 1) (1538548685001, 2) (1538548685002, 3)")
- tdSql.query("select * from t3 where tag1 like '%京'")
- tdSql.checkRows(3)
+ tdSql.error("select * from t3 where tag1 like '%京'")
- tdSql.query("select * from t4 where tag1 like '%京'")
- tdSql.checkRows(3)
+ tdSql.error("select * from t4 where tag1 like '%京'")
tdSql.query("select * from meters1 where tag1 like '%京'")
tdSql.checkRows(6)
- tdSql.query("select * from t5 where tag1 like '%g'")
- tdSql.checkRows(3)
+ tdSql.error("select * from t5 where tag1 like '%g'")
- tdSql.query("select * from t6 where tag1 like '%g'")
- tdSql.checkRows(3)
+ tdSql.error("select * from t6 where tag1 like '%g'")
tdSql.query("select * from meters1 where tag1 like '%g'")
tdSql.checkRows(6)
diff --git a/tests/pytest/query/queryJoin.py b/tests/pytest/query/queryJoin.py
index 57b7e8868d..59e01615b4 100644
--- a/tests/pytest/query/queryJoin.py
+++ b/tests/pytest/query/queryJoin.py
@@ -175,6 +175,8 @@ class TDTestCase:
tdSql.error("select count(join_mt0.c1), first(join_mt0.c1)-first(join_mt1.c1), first(join_mt1.c9) from join_mt0, join_mt1 where join_mt0.t1=join_mt1.t1 and join_mt0.ts=join_mt1.ts")
tdSql.error("select count(join_mt0.c1), first(join_mt0.c1), first(join_mt1.c9) from join_mt0, join_mt1 where join_mt0.t1=join_mt1.t1 and join_mt0.ts=join_mt1.ts interval(10a) group by join_mt0.t1, join_mt0.t2 order by join_mt0.t1 desc slimit 3")
tdSql.error("select count(join_mt0.c1), first(join_mt0.c1) from join_mt0, join_mt1 where join_mt0.t1=join_mt1.t1 and join_mt0.ts=join_mt1.ts interval(10a) group by join_mt0.t1, join_mt0.t2, join_mt1.t1 order by join_mt0.ts desc, join_mt1.ts asc limit 10;")
+ tdSql.error("select join_mt1.c1,join_mt0.c1 from join_mt1,join_mt0 where join_mt1.ts = join_mt0.ts and join_mt1.t1 = join_mt0.t1 order by t")
+
def stop(self):
tdSql.close()
diff --git a/tests/pytest/tools/lowa.py b/tests/pytest/tools/lowaTest.py
similarity index 100%
rename from tests/pytest/tools/lowa.py
rename to tests/pytest/tools/lowaTest.py
diff --git a/tests/pytest/tools/taosdemo.py b/tests/pytest/tools/taosdemoTest.py
similarity index 100%
rename from tests/pytest/tools/taosdemo.py
rename to tests/pytest/tools/taosdemoTest.py
diff --git a/tests/pytest/tools/taosdumpTest.py b/tests/pytest/tools/taosdumpTest.py
new file mode 100644
index 0000000000..534a477b34
--- /dev/null
+++ b/tests/pytest/tools/taosdumpTest.py
@@ -0,0 +1,89 @@
+###################################################################
+# Copyright (c) 2016 by TAOS Technologies, Inc.
+# All rights reserved.
+#
+# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted,
+# disclosed or used in any form or by any means other than as
+# expressly provided by the written permission from Jianhui Tao
+#
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import os
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+
+
+class TDTestCase:
+ def init(self, conn, logSql):
+ tdLog.debug("start to execute %s" % __file__)
+ tdSql.init(conn.cursor(), logSql)
+
+ self.ts = 1538548685000
+ self.numberOfTables = 10000
+ self.numberOfRecords = 100
+
+ def run(self):
+ tdSql.prepare()
+
+ tdSql.execute("create table st(ts timestamp, c1 int, c2 nchar(10)) tags(t1 int, t2 binary(10))")
+ tdSql.execute("create table t1 using st tags(1, 'beijing')")
+ sql = "insert into t1 values"
+ currts = self.ts
+ for i in range(100):
+ sql += "(%d, %d, 'nchar%d')" % (currts + i, i % 100, i % 100)
+ tdSql.execute(sql)
+
+ tdSql.execute("create table t2 using st tags(2, 'shanghai')")
+ sql = "insert into t2 values"
+ currts = self.ts
+ for i in range(100):
+ sql += "(%d, %d, 'nchar%d')" % (currts + i, i % 100, i % 100)
+ tdSql.execute(sql)
+
+ os.system("taosdump --databases db -o /tmp")
+
+ tdSql.execute("drop database db")
+ tdSql.query("show databases")
+ tdSql.checkRows(0)
+
+ os.system("taosdump -i /tmp")
+
+ tdSql.query("show databases")
+ tdSql.checkRows(1)
+ tdSql.checkData(0, 0, 'db')
+
+ tdSql.execute("use db")
+ tdSql.query("show stables")
+ tdSql.checkRows(1)
+ tdSql.checkData(0, 0, 'st')
+
+ tdSql.query("show tables")
+ tdSql.checkRows(2)
+ tdSql.checkData(0, 0, 't2')
+ tdSql.checkData(1, 0, 't1')
+
+ tdSql.query("select * from t1")
+ tdSql.checkRows(100)
+ for i in range(100):
+ tdSql.checkData(i, 1, i)
+ tdSql.checkData(i, 2, "nchar%d" % i)
+
+ tdSql.query("select * from t2")
+ tdSql.checkRows(100)
+ for i in range(100):
+ tdSql.checkData(i, 1, i)
+ tdSql.checkData(i, 2, "nchar%d" % i)
+
+ def stop(self):
+ tdSql.close()
+ tdLog.success("%s successfully executed" % __file__)
+
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
\ No newline at end of file
diff --git a/tests/script/tmp/mnodes.sim b/tests/script/tmp/mnodes.sim
index 48dbc19cb2..de02ae741b 100644
--- a/tests/script/tmp/mnodes.sim
+++ b/tests/script/tmp/mnodes.sim
@@ -20,6 +20,10 @@ system sh/cfg.sh -n dnode1 -c maxTablesPerVnode -v 20000
system sh/cfg.sh -n dnode2 -c maxTablesPerVnode -v 20000
system sh/cfg.sh -n dnode3 -c maxTablesPerVnode -v 20000
+system sh/cfg.sh -n dnode1 -c maxVgroupsPerDb -v 20
+system sh/cfg.sh -n dnode2 -c maxVgroupsPerDb -v 20
+system sh/cfg.sh -n dnode3 -c maxVgroupsPerDb -v 20
+
system sh/cfg.sh -n dnode1 -c replica -v 3
system sh/cfg.sh -n dnode2 -c replica -v 3
system sh/cfg.sh -n dnode3 -c replica -v 3