Corrected connection-shared-by-thread flag in Crash_Gen
parent 496932f0cc
commit 30f5a202f9
@@ -105,10 +105,11 @@ class TDengineCursor(object):
     def execute(self, operation, params=None):
         """Prepare and execute a database operation (query or command).
         """
-        if threading.get_ident() != self._threadId:
-            info ="Cursor execute:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
-            print(info)
-            return None
+        # if threading.get_ident() != self._threadId:
+        #     info ="Cursor execute:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
+        #     raise OperationalError(info)
+        #     print(info)
+        #     return None
 
         if not operation:
             return None
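This hunk, and the two that follow, comment out the same thread-affinity guard: the cursor records the thread that created it (self._threadId) and bails out when execute() is called from any other thread. A minimal standalone sketch of that guard, with the class name and error type chosen for illustration rather than taken from the connector:

import threading

class GuardedCursor:
    def __init__(self):
        self._threadId = threading.get_ident()   # remember the creating thread

    def execute(self, operation, params=None):
        if threading.get_ident() != self._threadId:
            # the old connector printed a warning and returned None;
            # raising instead makes cross-thread misuse fail loudly
            raise RuntimeError("cursor created in thread %d, used from thread %d"
                               % (self._threadId, threading.get_ident()))
        return operation   # placeholder for actually running the statement

With the guard commented out, a cursor created on one thread can be driven by any worker thread, which is what the shared-connection mode in crash_gen below relies on.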
@@ -195,10 +196,11 @@ class TDengineCursor(object):
     def fetchall(self):
         """Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation.
         """
-        if threading.get_ident() != self._threadId:
-            info ="Cursor fetchall:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
-            print(info)
-            return None
+        # if threading.get_ident() != self._threadId:
+        #     info ="[WARNING] Cursor fetchall:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
+        #     raise OperationalError(info)
+        #     print(info)
+        #     return None
 
         if self._result is None or self._fields is None:
             raise OperationalError("Invalid use of fetchall")
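The docstring above is the standard DB-API contract for fetchall(). A hedged usage sketch, assuming the connector is imported as taos and that a test.weather table exists (both are assumptions, not part of this commit):

import taos   # TDengine Python connector (assumed module name)

conn = taos.connect(host='localhost')
cursor = conn.cursor()
cursor.execute('select * from test.weather')
rows = cursor.fetchall()          # a list of tuples, one tuple per row
for row in rows:
    print(row)
cursor.close()
conn.close()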
@@ -243,10 +245,11 @@ class TDengineCursor(object):
     def _handle_result(self):
         """Handle the return result from query.
         """
-        if threading.get_ident() != self._threadId:
-            info = "Cursor handleresult:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
-            print(info)
-            return None
+        # if threading.get_ident() != self._threadId:
+        #     info = "Cursor handleresult:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
+        #     raise OperationalError(info)
+        #     print(info)
+        #     return None
 
         self._description = []
         for ele in self._fields:
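_handle_result() rebuilds the cursor's column metadata from self._fields after every query. A generic DB-API-flavoured sketch of that idea; the field layout, names, and type labels here are placeholders, not the connector's real structures:

def build_description(fields):
    # DB-API cursors expose .description as 7-item tuples per column:
    # (name, type_code, display_size, internal_size, precision, scale, null_ok)
    description = []
    for field in fields:
        description.append((field['name'], field['type'],
                            None, None, None, None, False))
    return description

print(build_description([{'name': 'ts', 'type': 'TIMESTAMP'},
                         {'name': 'temperature', 'type': 'FLOAT'}]))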
@@ -106,6 +106,7 @@ class WorkerThread:
         logger.info("Starting to run thread: {}".format(self._tid))
 
         if ( gConfig.per_thread_db_connection ): # type: ignore
+            logger.debug("Worker thread openning database connection")
             self._dbConn.open()
 
         self._doTaskLoop()
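The added debug line traces the per-thread-connection branch: when the flag is on, each worker opens its own connection at the top of run(). A standalone sketch of that shape, with crash_gen's helpers replaced by placeholders:

import threading

class WorkerSketch(threading.Thread):
    def __init__(self, db_conn, per_thread_db_connection):
        super().__init__()
        self._dbConn = db_conn
        self._perThread = per_thread_db_connection

    def run(self):
        if self._perThread:
            self._dbConn.open()        # each worker owns its own connection
        try:
            self._doTaskLoop()         # stand-in for crash_gen's task loop
        finally:
            if self._perThread:
                self._dbConn.close()   # release the per-thread connection

    def _doTaskLoop(self):
        pass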
@@ -246,6 +247,7 @@ class ThreadCoordinator:
             logger.debug("[STT] starting transitions")
             sm.transition(self._executedTasks) # at end of step, transiton the DB state
             logger.debug("[STT] transition ended")
+            # Due to limitation (or maybe not) of the Python library, we cannot share connections across threads
             if sm.hasDatabase() :
                 for t in self._pool.threadList:
                     logger.debug("[DB] use db for all worker threads")
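The new comment records the motivation for the flag: the commit treats connections as possibly not shareable across threads in the Python library. When a single shared connection is used anyway, access has to be serialized; a minimal sketch of that workaround (illustrative, not crash_gen's code):

import threading

class LockedConnection:
    def __init__(self, conn):
        self._conn = conn
        self._lock = threading.Lock()

    def execute(self, sql):
        with self._lock:               # one thread at a time on the shared connection
            cursor = self._conn.cursor()
            try:
                return cursor.execute(sql)
            finally:
                cursor.close()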
@@ -1492,7 +1494,7 @@ def main():
                         help='Run TDengine service in foreground (default: false)')
     parser.add_argument('-l', '--larger-data', action='store_true',
                         help='Write larger amount of data during write operations (default: false)')
-    parser.add_argument('-p', '--per-thread-db-connection', action='store_false',
+    parser.add_argument('-p', '--per-thread-db-connection', action='store_true',
                         help='Use a single shared db connection (default: false)')
     parser.add_argument('-r', '--record-ops', action='store_true',
                         help='Use a pair of always-fsynced fils to record operations performing + performed, for power-off tests (default: false)')
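The one-word change on the -p option is the flag correction named in the commit message: with action='store_false' the option defaulted to True, so passing -p could only switch per-thread connections off; with action='store_true' it defaults to False and -p switches them on. A throwaway parser shows the corrected behaviour:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-p', '--per-thread-db-connection', action='store_true',
                    help='per-thread db connection (default: false)')

print(parser.parse_args([]).per_thread_db_connection)      # False (the default)
print(parser.parse_args(['-p']).per_thread_db_connection)  # True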
@@ -1503,6 +1505,7 @@ def main():
     global gConfig
     gConfig = parser.parse_args()
 
     # if len(sys.argv) == 1:
     #     parser.print_help()
     #     sys.exit()