Merge branch '3.0' of github.com:taosdata/TDengine into test/cover1

This commit is contained in:
happyguoxy 2025-03-21 17:51:35 +08:00
commit e6be81cc84
289 changed files with 8715 additions and 7371 deletions

View File

@ -90,6 +90,16 @@ jobs:
which taosadapter
which taoskeeper
- name: Statistics ldd
run: |
find ${{ github.workspace }}/debug/build/lib -type f -name "*.so" -print0 | xargs -0 ldd || true
find ${{ github.workspace }}/debug/build/bin -type f -print0 | xargs -0 ldd || true
- name: Statistics size
run: |
find ${{ github.workspace }}/debug/build/lib -type f -print0 | xargs -0 ls -lhrS
find ${{ github.workspace }}/debug/build/bin -type f -print0 | xargs -0 ls -lhrS
- name: Start taosd
run: |
cp /etc/taos/taos.cfg ./

View File

@ -12,6 +12,7 @@ on:
- 'tools/tdgpt/**'
- 'source/libs/executor/src/forecastoperator.c'
- 'source/libs/executor/src/anomalywindowoperator.c'
- 'source/dnode/mnode/impl/src/mndAnode.c'
- 'include/common/tanalytics.h'
- 'source/common/src/tanalytics.c'
- 'tests/parallel/tdgpt_cases.task'

View File

@ -12,6 +12,7 @@ on:
- 'tools/tdgpt/**'
- 'source/libs/executor/src/forecastoperator.c'
- 'source/libs/executor/src/anomalywindowoperator.c'
- 'source/dnode/mnode/impl/src/mndAnode.c'
- 'include/common/tanalytics.h'
- 'source/common/src/tanalytics.c'
- 'tests/parallel/tdgpt_cases.task'

1
.gitignore vendored
View File

@ -59,7 +59,6 @@ tools/upx*
html/
/.vs
/CMakeFiles/3.10.2
/CMakeCache.txt
/Makefile
/*.cmake
/src/cq/test/CMakeFiles/cqtest.dir/*.cmake

View File

@ -361,6 +361,7 @@ def pre_test_build_win() {
pip3 install taospy==2.7.21
pip3 install taos-ws-py==0.3.8
xcopy /e/y/i/f %WIN_INTERNAL_ROOT%\\debug\\build\\lib\\taos.dll C:\\Windows\\System32
xcopy /e/y/i/f %WIN_INTERNAL_ROOT%\\debug\\build\\lib\\taosnative.dll C:\\Windows\\System32
'''
return 1
}
@ -379,7 +380,9 @@ def run_win_test() {
bat '''
echo "windows test ..."
xcopy /e/y/i/f %WIN_INTERNAL_ROOT%\\debug\\build\\lib\\taos.dll C:\\Windows\\System32
xcopy /e/y/i/f %WIN_INTERNAL_ROOT%\\debug\\build\\lib\\taosnative.dll C:\\Windows\\System32
ls -l C:\\Windows\\System32\\taos.dll
ls -l C:\\Windows\\System32\\taosnative.dll
time /t
cd %WIN_SYSTEM_TEST_ROOT%
echo "testing ..."

View File

@ -99,12 +99,8 @@ ENDIF()
SET(TAOS_LIB taos)
SET(TAOS_LIB_STATIC taos_static)
SET(TAOS_NATIVE_LIB taosnative)
SET(TAOS_NATIVE_LIB_STATIC taosnative_static)
IF(${TD_WINDOWS})
SET(TAOS_LIB_PLATFORM_SPEC taos_static)
ELSE()
SET(TAOS_LIB_PLATFORM_SPEC taos)
ENDIF()
# build TSZ by default
IF("${TSZ_ENABLED}" MATCHES "false")

View File

@ -2,7 +2,7 @@
# taosws-rs
ExternalProject_Add(taosws-rs
GIT_REPOSITORY https://github.com/taosdata/taos-connector-rust.git
GIT_TAG main
GIT_TAG 3.0
SOURCE_DIR "${TD_SOURCE_DIR}/tools/taosws-rs"
BINARY_DIR ""
#BUILD_IN_SOURCE TRUE

View File

@ -191,7 +191,7 @@ INTERVAL(interval_val [, interval_offset])
The time window clause includes 3 sub-clauses:
- INTERVAL clause: used to generate windows of equal time periods, where interval_val specifies the size of each time window, and interval_offset specifies its starting offset. By default, windows begin at Unix time 0 (1970-01-01 00:00:00 UTC). If interval_offset is specified, the windows start from "Unix time 0 + interval_offset";
- SLIDING clause: used to specify the time the window slides forward;
- FILL: used to specify the filling mode of data in case of missing data in the window interval.
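For illustration, a minimal sketch of the interval_offset behavior described above (the `meters` table and `current` column are assumed example names):
```sql
-- Without an offset, 1-day windows are aligned to Unix time 0, i.e. they start at 00:00:00 UTC.
SELECT _wstart, _wend, AVG(current) FROM meters INTERVAL(1d);
-- With a 2-hour offset, the same 1-day windows start at "Unix time 0 + 2h", i.e. 02:00:00 UTC.
SELECT _wstart, _wend, AVG(current) FROM meters INTERVAL(1d, 2h);
```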

View File

@ -146,9 +146,19 @@ Not supported
```
</TabItem>
<TabItem label="C" value="c">
The example code for binding parameters with stmt2 (TDengine v3.3.5.0 or higher is required) is as follows:
```c
{{#include docs/examples/c/stmt2_insert_demo.c}}
```
The example code for binding parameters with stmt is as follows:
```c
{{#include docs/examples/c/stmt_insert_demo.c}}
```
</TabItem>
<TabItem label="REST API" value="rest">
Not supported

View File

@ -55,7 +55,7 @@ When network I/O and other processing resources are not bottlenecks, by optimizi
Generally, when TDengine needs to select a mount point from the same level to create a new data file, it uses a round-robin strategy for selection. However, in reality, each disk may have different capacities, or the same capacity but different amounts of data written, leading to an imbalance in available space on each disk. In practice, this may result in selecting a disk with very little remaining space.
To address this issue, starting from 3.1.1.0, a new configuration minDiskFreeSize was introduced. When the available space on a disk is less than or equal to this threshold, that disk will no longer be selected for generating new data files. The unit of this configuration item is bytes, and its value should be greater than 2GB, i.e., mount points with less than 2GB of available space will be skipped.
To address this issue, starting from 3.1.1.0, a new configuration minDiskFreeSize was introduced. When the available space on a disk is less than or equal to this threshold, that disk will no longer be selected for generating new data files. The unit of this configuration item is bytes. For example, if its value is set to 2GB, mount points with less than 2GB of available space will be skipped.
Starting from version 3.3.2.0, a new configuration `disable_create_new_file` has been introduced to control the prohibition of generating new files on a certain mount point. The default value is `false`, which means new files can be generated on each mount point by default.

View File

@ -0,0 +1,278 @@
---
sidebar_label: Security Configuration
title: Security Configuration
toc_max_heading_level: 4
---
import Image from '@theme/IdealImage';
import imgEcosys from '../assets/tdengine-components-01.png';
## Background
The distributed and multi-component nature of TDengine makes its security configuration a concern in production systems. This document aims to explain the security issues of various TDengine components and different deployment methods, and provide deployment and configuration suggestions to support the security of user data.
## Components Involved in Security Configuration
TDengine includes multiple components:
- `taosd`: Core component.
- `taosc`: Client library.
- `taosAdapter`: REST API and WebSocket service.
- `taosKeeper`: Monitoring service component.
- `taosX`: Data pipeline and backup recovery component.
- `taosxAgent`: Auxiliary component for external data source access.
- `taosExplorer`: Web visualization management interface.
In addition to TDengine deployment and applications, there are also the following components:
- Applications that access and use the TDengine database through various connectors.
- External data sources: Other data sources that access TDengine, such as MQTT, OPC, Kafka, etc.
The relationship between the components is as follows:
<figure>
<Image img={imgEcosys} alt="TDengine ecosystem"/>
<figcaption>TDengine ecosystem</figcaption>
</figure>
## TDengine Security Settings
### `taosd`
The `taosd` cluster uses TCP connections based on its own protocol for data exchange, which has low risk, but the transmission process is not encrypted, so there is still some security risk.
Enabling compression may help with TCP data obfuscation.
- **compressMsgSize**: Whether to compress RPC messages. Integer, optional: -1: Do not compress any messages; 0: Compress all messages; N (N>0): Only compress messages larger than N bytes.
To ensure the traceability of database operations, it is recommended to enable the audit function.
- **audit**: Audit function switch, 0 is off, 1 is on. Default is on.
- **auditInterval**: Reporting interval, in milliseconds. Default is 5000.
- **auditCreateTable**: Whether to enable the audit function for creating sub-tables. 0 is off, 1 is on. Default is on.
To ensure the security of data files, database encryption can be enabled.
- **encryptAlgorithm**: Data encryption algorithm.
- **encryptScope**: Data encryption scope.
Enabling the whitelist can restrict access addresses and further enhance privacy.
- **enableWhiteList**: Whitelist function switch, 0 is off, 1 is on; default is off.
### `taosc`
Users and other components use the native client library (`taosc`) and its own protocol to connect to `taosd`, which has low data security risk, but the transmission process is still not encrypted, so there is some security risk.
### `taosAdapter`
`taosAdapter` uses the native client library (`taosc`) and its own protocol to connect to `taosd`, and also supports RPC message compression, so there is no data security issue.
Applications and other components connect to `taosAdapter` through various language connectors. By default, the connection is based on HTTP 1.1 and is not encrypted. To ensure the security of data transmission between `taosAdapter` and other components, SSL encrypted connections need to be configured. Modify the following configuration in the `/etc/taos/taosadapter.toml` configuration file:
```toml
[ssl]
enable = true
certFile = "/path/to/certificate-file"
keyFile = "/path/to/private-key"
```
Configure HTTPS/SSL access in the connector to complete encrypted access.
To further enhance security, the whitelist function can be enabled, and configured in `taosd`, which also applies to the `taosAdapter` component.
### `taosX`
`taosX` includes REST API and gRPC interfaces, where the gRPC interface is used for `taos-agent` connections.
- The REST API interface is based on HTTP 1.1 and is not encrypted, posing a security risk.
- The gRPC interface is based on HTTP 2 and is not encrypted, posing a security risk.
To ensure data security, it is recommended that the `taosX` API interface is limited to internal access only. Modify the following configuration in the `/etc/taos/taosx.toml` configuration file:
```toml
[serve]
listen = "127.0.0.1:6050"
grpc = "127.0.0.1:6055"
```
Starting from TDengine 3.3.6.0, `taosX` supports HTTPS connections. Add the following configuration in the `/etc/taos/taosx.toml` file:
```toml
[serve]
ssl_cert = "/path/to/server.pem"
ssl_key = "/path/to/server.key"
ssl_ca = "/path/to/ca.pem"
```
And modify the API address to HTTPS connection in Explorer:
```toml
# Local connection to taosX API
x_api = "https://127.0.0.1:6050"
# Public IP or domain address
grpc = "https://public.domain.name:6055"
```
### `taosExplorer`
Similar to the `taosAdapter` component, the `taosExplorer` component provides HTTP services for external access. Modify the following configuration in the `/etc/taos/explorer.toml` configuration file:
```toml
[ssl]
# SSL certificate file
certificate = "/path/to/ca.file"
# SSL certificate private key
certificate_key = "/path/to/key.file"
```
Then, use HTTPS to access Explorer, such as [https://192.168.12.34:6060](https://192.168.12.34:6060).
### `taosxAgent`
After `taosX` enables HTTPS, the `Agent` component and `taosX` use HTTP 2 encrypted connections, using Arrow-Flight RPC for data exchange. The transmission content is in binary format, and only registered `Agent` connections are valid, ensuring data security.
It is recommended to always enable HTTPS connections for `Agent` services in insecure or public network environments.
### `taosKeeper`
`taosKeeper` uses WebSocket connections to communicate with `taosAdapter`, writing monitoring information reported by other components into TDengine.
The current version of `taosKeeper` has security risks:
- The listening address cannot be restricted to the local machine. By default, it listens on port 6043 on all network interfaces, posing a risk of network attacks. This risk can be ignored when deploying with Docker or Kubernetes without exposing the `taosKeeper` port.
- The configuration file contains plaintext passwords, so the visibility of the configuration file needs to be reduced. In `/etc/taos/taoskeeper.toml`:
```toml
[tdengine]
host = "localhost"
port = 6041
username = "root"
password = "taosdata"
usessl = false
```
## Security Enhancements
We recommend using TDengine within a local area network.
If you must provide access outside the local area network, consider adding the following configurations:
### Load Balancing
Use load balancing to provide `taosAdapter` services externally.
Take Nginx as an example to configure multi-node load balancing:
```nginx
http {
server {
listen 6041;
location / {
proxy_pass http://websocket;
# Headers for websocket compatible
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
# Forwarded headers
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Port $server_port;
proxy_set_header X-Forwarded-Server $hostname;
proxy_set_header X-Real-IP $remote_addr;
}
}
upstream websocket {
server 192.168.11.61:6041;
server 192.168.11.62:6041;
server 192.168.11.63:6041;
}
}
```
If the `taosAdapter` component is not configured with SSL secure connections, SSL needs to be configured to ensure secure access. SSL can be configured at a higher-level API Gateway or in Nginx; if you have stronger security requirements for the connections between components, you can configure SSL in all components. The Nginx configuration is as follows:
```nginx
http {
server {
listen 443 ssl;
ssl_certificate /path/to/your/certificate.crt;
ssl_certificate_key /path/to/your/private.key;
}
}
```
### Security Gateway
In modern internet production systems, the use of security gateways is also very common. [traefik](https://traefik.io/) is a good open-source choice. We take traefik as an example to explain the security configuration in the API gateway.
Traefik provides various security configurations through middleware, including:
1. Authentication: Traefik provides multiple authentication methods such as BasicAuth, DigestAuth, custom authentication middleware, and OAuth 2.0.
2. IP Whitelist: Restrict the allowed client IPs.
3. Rate Limit: Control the number of requests sent to the service.
4. Custom Headers: Add configurations such as `allowedHosts` through custom headers to improve security.
A common middleware example is as follows:
```yaml
labels:
- "traefik.enable=true"
- "traefik.http.routers.tdengine.rule=Host(`api.tdengine.example.com`)"
- "traefik.http.routers.tdengine.entrypoints=https"
- "traefik.http.routers.tdengine.tls.certresolver=default"
- "traefik.http.routers.tdengine.service=tdengine"
- "traefik.http.services.tdengine.loadbalancer.server.port=6041"
- "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
- "traefik.http.middlewares.check-header.headers.customrequestheaders.X-Secret-Header=SecretValue"
- "traefik.http.middlewares.check-header.headers.customresponseheaders.X-Header-Check=true"
- "traefik.http.middlewares.tdengine-ipwhitelist.ipwhitelist.sourcerange=127.0.0.1/32, 192.168.1.7"
- "traefik.http.routers.tdengine.middlewares=redirect-to-https,check-header,tdengine-ipwhitelist"
```
The above example completes the following configurations:
- TLS authentication uses the `default` configuration, which can be configured in the configuration file or traefik startup parameters, as follows:
```yaml
traefik:
image: "traefik:v2.3.2"
hostname: "traefik"
networks:
- traefik
command:
- "--log.level=INFO"
- "--api.insecure=true"
- "--providers.docker=true"
- "--providers.docker.exposedbydefault=false"
- "--providers.docker.swarmmode=true"
- "--providers.docker.network=traefik"
- "--providers.docker.watch=true"
- "--entrypoints.http.address=:80"
- "--entrypoints.https.address=:443"
- "--certificatesresolvers.default.acme.dnschallenge=true"
- "--certificatesresolvers.default.acme.dnschallenge.provider=alidns"
- "--certificatesresolvers.default.acme.dnschallenge.resolvers=ns1.alidns.com"
- "--certificatesresolvers.default.acme.email=linhehuo@gmail.com"
- "--certificatesresolvers.default.acme.storage=/letsencrypt/acme.json"
```
The above startup parameters configure the `default` TLS certificate resolver and automatic ACME authentication (automatic certificate application and renewal).
- Middleware `redirect-to-https`: Configure redirection from HTTP to HTTPS, forcing the use of secure connections.
```yaml
- "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
```
- Middleware `check-header`: Configure custom header checks. External access must add custom headers and match header values to prevent unauthorized access. This is a very simple and effective security mechanism when providing API access.
- Middleware `tdengine-ipwhitelist`: Configure IP whitelist. Only allow specified IPs to access, using CIDR routing rules for matching, and can set internal and external IP addresses.
## Summary
Data security is a key aspect of the TDengine product. The measures above are designed to protect TDengine deployments from unauthorized access and data breaches while maintaining performance and functionality. However, TDengine's own security configuration is not the only safeguard in production; it is equally important to develop solutions that match the user's business systems and specific needs.

View File

@ -0,0 +1,81 @@
---
sidebar_label: Perspective
title: Integration With Perspective
toc_max_heading_level: 4
---
Perspective is a powerful open-source data visualization library developed by [Prospective.co](https://www.perspective.co/). Leveraging WebAssembly and Web Workers, it enables interactive real-time data analysis in web applications and provides high-performance visualization in the browser. With it, developers can build dashboards and charts that update in real time, and users can easily interact with the data, filtering, sorting, and exploring it as needed. It is highly flexible, adapting to various data formats and business scenarios; fast, ensuring smooth interaction even with large-scale data; and easy to use, allowing both beginners and professional developers to quickly build visualization interfaces.
For data connectivity, Perspective supports TDengine data sources through the TDengine Python connector. It can efficiently retrieve various types of data from TDengine, such as massive time-series data, and provides real-time capabilities including complex charting, in-depth statistical analysis, and trend prediction, helping users gain insight into the value of their data and supporting decision-making. It is an ideal choice for building applications with high requirements for real-time data visualization and analysis.
![perspective-architecture](./perspective/prsp_architecture.webp)
## Prerequisites
Perform the following preparation steps on a Linux system:
- TDengine is installed and running normally (both Enterprise and Community versions are available).
- taosAdapter is running normally, refer to [taosAdapter Reference](../../../tdengine-reference/components/taosadapter/).
- Python version 3.10 or higher has been installed (if not installed, please refer to [Python Installation](https://docs.python.org/)).
- Download or clone the [perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) project. After entering the root directory of the project, run the "install.sh" script to download and install the TDengine client library and related dependencies locally.
## Visualize data
**Step 1**, Run the "run.sh" script in the root directory of the [perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) project to start the Perspective service. This service will retrieve data from the TDengine database every 300 milliseconds and transmit the data in a streaming form to the web-based `Perspective Viewer`.
```shell
sh run.sh
```
**Step 2**, Start a static web server. Then open the prsp-viewer.html page in the browser to display the visualized data.
```shell
python -m http.server 8081
```
The effect presented after accessing the web page through the browser is shown in the following figure:
![perspective-viewer](./perspective/prsp_view.webp)
## Instructions for use
### Write Data to TDengine
The `producer.py` script in the root directory of the [perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) project can periodically insert data into the TDengine database with the help of the TDengine Python connector. This script will generate random data and insert it into the database, thus simulating the process of writing real-time data. The specific execution steps are as follows:
1. Establish a connection to TDengine.
2. Create the `power` database and the `meters` table.
3. Generate random data every 300 milliseconds and write it into the TDengine database.
For detailed instructions on writing using the Python connector, please refer to [Python Parameter Binding](../../../tdengine-reference/client-libraries/python/#parameter-binding).
### Load Data from TDengine
The `perspective_server.py` script in the root directory of the [perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) project will start a Perspective server. This server will read data from TDengine and stream the data to a Perspective table via the Tornado WebSocket.
1. Start a Perspective server.
2. Establish a connection to TDengine.
3. Create a Perspective table (the table structure needs to match the type of the table in the TDengine database).
4. Call the `Tornado.PeriodicCallback` function to start a scheduled task, thereby achieving the update of the data in the Perspective table. The sample code is as follows:
```python
{{#include docs/examples/perspective/perspective_server.py:perspective_server}}
```
### HTML Page Configuration
The `prsp-viewer.html` file in the root directory of the [perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) project embeds the `Perspective Viewer` into the HTML page. It connects to the Perspective server via a WebSocket and displays real-time data according to the chart configuration.
- Configure the displayed charts and the rules for data analysis.
- Establish a WebSocket connection with the Perspective server.
- Import the Perspective library, connect to the Perspective server via a WebSocket, and load the `meters_values` table to display dynamic data.
```html
{{#include docs/examples/perspective/prsp-viewer.html:perspective_viewer}}
```
## Reference Materials
- [Perspective Docs](https://perspective.finos.org/)
- [TDengine Python Connector](../../../tdengine-reference/client-libraries/python/)
- [TDengine Stream Processing](../../../advanced-features/stream-processing/)

Binary file not shown (new image, 50 KiB)

Binary file not shown (new image, 62 KiB)

View File

@ -170,7 +170,7 @@ The effective value of charset is UTF-8.
|tempDir | |Not supported |Specifies the directory for generating temporary files during system operation, default value /tmp|
|minimalDataDirGB | |Not supported |Minimum space to be reserved in the time-series data storage directory specified by dataDir, in GB, default value 2|
|minimalTmpDirGB | |Not supported |Minimum space to be reserved in the temporary file directory specified by tempDir, in GB, default value 1|
|minDiskFreeSize |After 3.1.1.0|Supported, effective immediately |When the available space on a disk is less than or equal to this threshold, the disk will no longer be selected for generating new data files, unit is bytes, range 52428800-1073741824, default value 52428800; Enterprise parameter|
|minDiskFreeSize |After 3.1.1.0|Supported, effective immediately |When the available space on a disk is less than or equal to this threshold, the disk will no longer be selected for generating new data files, unit is bytes, range 52428800-2199023255552, default value 52428800; Enterprise parameter|
|s3MigrateIntervalSec|After 3.3.4.3|Supported, effective immediately |Trigger cycle for automatic upload of local data files to S3, in seconds. Minimum: 600; Maximum: 100000. Default value 3600; Enterprise parameter|
|s3MigrateEnabled |After 3.3.4.3|Supported, effective immediately |Whether to automatically perform S3 migration, default value is 0, which means auto S3 migration is off, can be set to 1; Enterprise parameter|
|s3Accesskey |After 3.3.4.3|Supported, effective after restart|Colon-separated user SecretId:SecretKey, for example AKIDsQmwsfKxTo2A6nGVXZN0UlofKn6JRRSJ:lIdoy99ygEacU7iHfogaN2Xq0yumSm1E; Enterprise parameter|

View File

@ -379,6 +379,7 @@ Specify the configuration parameters for tag and data columns in `super_tables`
`query_times` specifies the number of times to run the query, numeric type.
**Note: from version 3.3.5.6 onward, configuring both `specified_table_query` and `super_table_query` in the same JSON file is no longer supported.**
For other common parameters, see [General Configuration Parameters](#general-configuration-parameters)
@ -508,6 +509,15 @@ Note: Data types in the taosBenchmark configuration file must be in lowercase to
</details>
<details>
<summary>queryStb.json</summary>
```json
{{#include /TDengine/tools/taos-tools/example/queryStb.json}}
```
</details>
#### Subscription Example
<details>

View File

@ -43,6 +43,7 @@ In TDengine, the following data types can be used in the data model of basic tab
| 16 | VARCHAR | Custom | Alias for BINARY type |
| 17 | GEOMETRY | Custom | Geometry type, supported starting from version 3.1.0.0 |
| 18 | VARBINARY | Custom | Variable-length binary data, supported starting from version 3.1.1.0 |
| 19 | DECIMAL | 8 or 16 | High-precision numeric type. The range of values depends on the precision and scale specified in the type. Supported starting from version 3.3.6. See the description below. |
:::note
@ -61,6 +62,18 @@ In TDengine, the following data types can be used in the data model of basic tab
- VARBINARY is a data type for storing binary data, with a maximum length of 65,517 bytes for data columns and 16,382 bytes for label columns. Binary data can be written via SQL or schemaless methods (needs to be converted to a string starting with \x), or through stmt methods (can use binary directly). Displayed as hexadecimal starting with \x.
:::
### DECIMAL Data Type
The `DECIMAL` data type is used for high-precision numeric storage and is supported starting from version 3.3.6. The definition syntax is: `DECIMAL(18, 2)`, `DECIMAL(38, 10)`, where two parameters must be specified: `precision` and `scale`. `Precision` refers to the maximum number of significant digits supported, and `scale` refers to the maximum number of decimal places. For example, `DECIMAL(8, 4)` represents a range of `[-9999.9999, 9999.9999]`. When defining the `DECIMAL` data type, the range of `precision` is `[1, 38]`, and the range of `scale` is `[0, precision]`. If `scale` is 0, it represents integers only. You can also omit `scale`, in which case it defaults to 0. For example, `DECIMAL(18)` is equivalent to `DECIMAL(18, 0)`.
When the `precision` value is less than or equal to 18, 8 bytes of storage (DECIMAL64) are used internally. When the `precision` is in the range `(18, 38]`, 16 bytes of storage (DECIMAL) are used. When writing `DECIMAL` type data in SQL, numeric values can be written directly. If the value exceeds the maximum representable value for the type, a `DECIMAL_OVERFLOW` error will be reported. If the value does not exceed the maximum representable value but the number of decimal places exceeds the `scale`, it will be automatically rounded. For example, if the type is defined as `DECIMAL(10, 2)` and the value `10.987` is written, the actual stored value will be `10.99`.
The `DECIMAL` type only supports regular columns and does not currently support tag columns. The `DECIMAL` type supports SQL-based writes only and does not currently support `stmt` or schemaless writes.
When performing operations between integer types and the `DECIMAL` type, the integer type is converted to the `DECIMAL` type before the calculation. When the `DECIMAL` type is involved in calculations with `DOUBLE`, `FLOAT`, `VARCHAR`, or `NCHAR` types, it is converted to `DOUBLE` type for computation.
When querying `DECIMAL` type expressions, if the intermediate result of the calculation exceeds the maximum value that the current type can represent, a `DECIMAL_OVERFLOW` error is reported.
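For illustration, a minimal SQL sketch of the definition and rounding behavior described above (the database, table, and column names are made up for the example):
```sql
CREATE DATABASE IF NOT EXISTS test;
USE test;
-- DECIMAL(10, 2): up to 10 significant digits, 2 of them after the decimal point
CREATE TABLE quotes (ts TIMESTAMP, price DECIMAL(10, 2));
-- 10.987 has more decimal places than the scale of 2, so it is stored rounded as 10.99
INSERT INTO quotes VALUES (NOW, 10.987);
SELECT price FROM quotes;
```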
## Constants

View File

@ -1186,6 +1186,7 @@ CAST(expr AS type_name)
1) Invalid character situations when converting string types to numeric types, e.g., "a" might convert to 0, but will not throw an error.
2) When converting to numeric types, if the value exceeds the range that `type_name` can represent, it will overflow, but will not throw an error.
3) When converting to string types, if the converted length exceeds the length specified in `type_name`, it will be truncated, but will not throw an error.
- The DECIMAL type does not support conversion to or from JSON, VARBINARY, or GEOMETRY types.
#### TO_ISO8601
@ -1691,12 +1692,14 @@ AVG(expr)
**Function Description**: Calculates the average value of the specified field.
**Return Data Type**: DOUBLE, DECIMAL.
**Applicable Data Types**: Numeric types.
**Applicable to**: Tables and supertables.
**Description**: When the input type is DECIMAL, the output type is also DECIMAL. The precision and scale of the output conform to the rules described in the data type section. The result type is obtained by dividing the SUM type by UINT64. If the SUM result causes a DECIMAL type overflow, a DECIMAL OVERFLOW error is reported.
### COUNT
```sql
@ -1847,12 +1850,14 @@ SUM(expr)
**Function Description**: Calculates the sum of a column in a table/supertable.
**Return Data Type**: DOUBLE, BIGINT, DECIMAL.
**Applicable Data Types**: Numeric types.
**Applicable to**: Tables and supertables.
**Description**: When the input type is DECIMAL, the output type is DECIMAL(38, scale), where precision is the maximum value currently supported, and scale is the scale of the input type. If the SUM result overflows, a DECIMAL OVERFLOW error is reported.
### HYPERLOGLOG
```sql
@ -2254,6 +2259,7 @@ ignore_null_values: {
- INTERP is used to obtain the record value of a specified column at the specified time slice. It has a dedicated syntax (interp_clause) when used. For syntax introduction, see [reference link](../query-data/#interp).
- When there is no row data that meets the conditions at the specified time slice, the INTERP function will interpolate according to the settings of the [FILL](../time-series-extensions/#fill-clause) parameter.
- When INTERP is applied to a supertable, it will sort all the subtable data under that supertable by primary key column and perform interpolation calculations, and can also be used with PARTITION BY tbname to force the results to a single timeline.
- When using INTERP with FILL PREV/NEXT/NEAR modes, its behavior differs from window queries. If data exists at the slice, no FILL operation will be performed, even if the current value is NULL.
- INTERP can be used with the pseudocolumn _irowts to return the timestamp corresponding to the interpolation point (supported from version 3.0.2.0).
- INTERP can be used with the pseudocolumn _isfilled to display whether the return result is from the original record or generated by the interpolation algorithm (supported from version 3.0.3.0).
- INTERP can only use the pseudocolumn `_irowts_origin` when using FILL PREV/NEXT/NEAR modes. `_irowts_origin` is supported from version 3.3.4.9.
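For illustration, a minimal sketch of an INTERP query combining FILL(PREV) with the `_irowts` and `_isfilled` pseudocolumns mentioned above (the `meters` table, `current` column, and time range are assumed example values):
```sql
-- Interpolate `current` at 1-minute slices within the range; where no row exists at a slice,
-- FILL(PREV) reuses the previous value, _irowts returns the slice timestamp,
-- and _isfilled indicates whether the row was generated by filling.
SELECT _irowts, _isfilled, INTERP(current)
FROM meters
RANGE('2025-03-21 00:00:00', '2025-03-21 00:05:00')
EVERY(1m)
FILL(PREV);
```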

View File

@ -84,10 +84,10 @@ The FILL statement specifies the filling mode when data is missing in a window i
1. No filling: NONE (default filling mode).
2. VALUE filling: Fixed value filling, where the fill value must be specified. For example: FILL(VALUE, 1.23). Note that the final fill value is determined by the type of the corresponding column; for FILL(VALUE, 1.23), if the corresponding column is of INT type, the fill value is 1. If multiple columns in the query list need FILL, each FILL column must specify a VALUE, such as `SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`. Note that only ordinary columns in the SELECT expression need a FILL VALUE: expressions such as `_wstart`, `_wstart+1a`, `now`, `1+1` and the partition key (like tbname) used with PARTITION BY do not need a VALUE, whereas an expression like `timediff(last(ts), _wstart)` does.
3. PREV filling: Fill data using the previous non-NULL value. For example: FILL(PREV).
3. PREV filling: Fill data using the previous value. For example: FILL(PREV).
4. NULL filling: Fill data with NULL. For example: FILL(NULL).
5. LINEAR filling: Perform linear interpolation filling based on the nearest non-NULL values before and after. For example: FILL(LINEAR).
6. NEXT filling: Fill data using the next non-NULL value. For example: FILL(NEXT).
6. NEXT filling: Fill data using the next value. For example: FILL(NEXT).
Among these filling modes, except for the NONE mode which does not fill by default, other modes will be ignored if there is no data in the entire query time range, resulting in no fill data and an empty query result. This behavior is reasonable under some modes (PREV, NEXT, LINEAR) because no data means no fill value can be generated. For other modes (NULL, VALUE), theoretically, fill values can be generated, and whether to output fill values depends on the application's needs. To meet the needs of applications that require forced filling of data or NULL, without breaking the compatibility of existing filling modes, two new filling modes have been added starting from version 3.0.3.0:
@ -112,7 +112,7 @@ The differences between NULL, NULL_F, VALUE, VALUE_F filling modes for different
Time windows can be divided into sliding time windows and tumbling time windows.
The INTERVAL clause is used to generate windows of equal time periods, and SLIDING is used to specify the time the window slides forward. Each executed query is a time window, and the time window slides forward as time flows. When defining continuous queries, it is necessary to specify the size of the time window (time window) and the forward sliding times for each execution. As shown, [t0s, t0e], [t1s, t1e], [t2s, t2e] are the time window ranges for three continuous queries, and the sliding time range is indicated by sliding time. Query filtering, aggregation, and other operations are performed independently for each time window. When SLIDING is equal to INTERVAL, the sliding window becomes a tumbling window. By default, windows begin at Unix time 0 (1970-01-01 00:00:00 UTC). If interval_offset is specified, the windows start from "Unix time 0 + interval_offset".
<figure>
<Image img={imgStep01} alt=""/>

View File

@ -36,6 +36,7 @@ In this document, it specifically refers to the internal levels of the second-le
| float/double | disabled/delta-d | delta-d | lz4/zlib/zstd/xz/tsz | lz4 | medium |
| binary/nchar | disabled | disabled | lz4/zlib/zstd/xz | zstd | medium |
| bool | disabled/bit-packing | bit-packing | lz4/zlib/zstd/xz | zstd | medium |
| decimal | disabled | disabled | lz4/zlib/zstd/xz | zstd | medium |
## SQL Syntax

View File

@ -682,7 +682,7 @@ The basic API is used to establish database connections and provide a runtime en
- **Interface Description**: Cleans up the runtime environment, should be called before the application exits.
- `int taos_options(TSDB_OPTION option, const void * arg, ...)`
- **Interface Description**: Sets client options, currently supports locale (`TSDB_OPTION_LOCALE`), character set (`TSDB_OPTION_CHARSET`), timezone (`TSDB_OPTION_TIMEZONE`), configuration file path (`TSDB_OPTION_CONFIGDIR`), and driver type (`TSDB_OPTION_DRIVER`). Locale, character set, and timezone default to the current settings of the operating system. The driver type can be either the native interface (`native`) or the WebSocket interface (`websocket`), with the default being `websocket`.
- **Parameter Description**:
- `option`: [Input] Setting item type.
- `arg`: [Input] Setting item value.
@ -830,6 +830,12 @@ This section introduces APIs that are all synchronous interfaces. After being ca
- res: [Input] Result set.
- **Return Value**: Non-`NULL`: successful, returns a pointer to a TAOS_FIELD structure, each element representing the metadata of a column. `NULL`: failure.
- `TAOS_FIELD_E *taos_fetch_fields_e(TAOS_RES *res)`
- **Interface Description**: Retrieves the attributes of each column in the query result set (column name, data type, column length). Used in conjunction with `taos_num_fields()`, it can be used to parse the data of a tuple (a row) returned by `taos_fetch_row()`. In addition to the basic information provided by TAOS_FIELD, TAOS_FIELD_E also includes `precision` and `scale` information for the data type.
- **Parameter Description**:
- res: [Input] Result set.
- **Return Value**: Non-`NULL`: Success, returns a pointer to a TAOS_FIELD_E structure, where each element represents the metadata of a column. `NULL`: Failure.
- `void taos_stop_query(TAOS_RES *res)`
- **Interface Description**: Stops the execution of the current query.
- **Parameter Description**:

View File

@ -121,6 +121,7 @@ Please refer to the specific error codes:
| 0x2378 | consumer create error | Data subscription creation failed, check the error information and taos log for troubleshooting. |
| 0x2379 | seek offset must not be a negative number | The seek interface parameter must not be negative, use the correct parameters. |
| 0x237a | vGroup not found in result set | VGroup not assigned to the current consumer, due to the Rebalance mechanism causing the Consumer and VGroup to be unbound. |
| 0x2390 | background thread write error in Efficient Writing | A write error occurred in a background thread in Efficient Writing mode; you can stop writing and rebuild the connection. |
- [TDengine Java Connector Error Code](https://github.com/taosdata/taos-connector-jdbc/blob/main/src/main/java/com/taosdata/jdbc/TSDBErrorNumbers.java)
<!-- - [TDengine_ERROR_CODE](../error-code) -->
@ -321,6 +322,14 @@ The configuration parameters in properties are as follows:
- TSDBDriver.PROPERTY_KEY_APP_NAME: App name, can be used for display in the `show connections` query result. Effective only when using WebSocket connections. Default value is java.
- TSDBDriver.PROPERTY_KEY_APP_IP: App IP, can be used for display in the `show connections` query result. Effective only when using WebSocket connections. Default value is empty.
- TSDBDriver.PROPERTY_KEY_ASYNC_WRITE: Efficient Writing mode. Currently, only the `stmt` method is supported. Effective only when using WebSocket connections. Default value is empty, meaning Efficient Writing mode is not enabled.
- TSDBDriver.PROPERTY_KEY_BACKEND_WRITE_THREAD_NUM: In Efficient Writing mode, this refers to the number of background write threads. Effective only when using WebSocket connections. Default value is 10.
- TSDBDriver.PROPERTY_KEY_BATCH_SIZE_BY_ROW: In Efficient Writing mode, this is the batch size for writing data, measured in rows. Effective only when using WebSocket connections. Default value is 1000.
- TSDBDriver.PROPERTY_KEY_CACHE_SIZE_BY_ROW: In Efficient Writing mode, this is the cache size, measured in rows. Effective only when using WebSocket connections. Default value is 10000.
- TSDBDriver.PROPERTY_KEY_COPY_DATA: In Efficient Writing mode, this determines whether to copy the binary data passed by the application through the `addBatch` method. Effective only when using WebSocket connections. Default value is false.
- TSDBDriver.PROPERTY_KEY_STRICT_CHECK: In Efficient Writing mode, this determines whether to validate the length of table names and variable-length data types. Effective only when using WebSocket connections. Default value is false.
- TSDBDriver.PROPERTY_KEY_RETRY_TIMES: In Efficient Writing mode, this is the number of retry attempts for failed write operations. Effective only when using WebSocket connections. Default value is 3.
Additionally, for native JDBC connections, other parameters such as log level and SQL length can be specified by specifying the URL and Properties.
**Priority of Configuration Parameters**

View File

@ -25,6 +25,7 @@ Support all platforms that can run Node.js.
| Node.js Connector Version | Major Changes | TDengine Version |
| ------------------------- | ------------------------------------------------------------------------ | --------------------------- |
| 3.1.5 | Password supports special characters. | - |
| 3.1.4 | Modified the readme. | - |
| 3.1.3 | Upgraded the es5-ext version to address vulnerabilities in the lower version. | - |
| 3.1.2 | Optimized data protocol and parsing, significantly improved performance. | - |

View File

@ -41,6 +41,8 @@ This document details the server error codes that may be encountered when using
| 0x80000107 | Ref ID is removed | The referenced ref resource has been released | Preserve the scene and logs, report issue on github |
| 0x80000108 | Invalid Ref ID | Invalid ref ID | Preserve the scene and logs, report issue on github |
| 0x8000010A | Ref is not there | ref information does not exist | Preserve the scene and logs, report issue on github |
| 0x8000010B | Driver was not loaded | libtaosnative.so or libtaosws.so was not found in the system path | Reinstall the client driver |
| 0x8000010C | Function was not loaded from the driver | some functions defined in libtaos.so are not implemented in libtaosnative.so or libtaosws.so | Reinstall the client driver |
| 0x80000110 | Unexpected generic error | System internal error | Preserve the scene and logs, report issue on github |
| 0x80000111 | Action in progress | Operation in progress | 1. Wait for the operation to complete 2. Cancel the operation if necessary 3. If it exceeds a reasonable time and still not completed, preserve the scene and logs, or contact customer support |
| 0x80000112 | Out of range | Configuration parameter exceeds allowed value range | Change the parameter |
@ -560,9 +562,12 @@ This document details the server error codes that may be encountered when using
## virtual table
| Error Code | Description | Possible Error Scenarios or Reasons | Recommended Actions for Users |
|------------|-------------|-------------------------------------|-------------------------------|
| 0x80006200 | Virtual table scan internal error | virtual table scan operator internal error, generally does not occur | Check error logs, contact development for handling |
| 0x80006201 | Virtual table scan invalid downstream operator type | The incorrect execution plan generated causes the downstream operator type of the virtual table scan operator to be incorrect. | Check error logs, contact development for handling |
| 0x80006202 | Virtual table prim timestamp column should not has ref | The timestamp primary key column of a virtual table should not have a data source. If it does, this error will occur during subsequent queries on the virtual table. | Check error logs, contact development for handling |
| 0x80006203 | Create virtual child table must use virtual super table | Create virtual child table using non-virtual super table | create virtual child table using virtual super table |
| 0x80006204 | Virtual table not support decimal type | Create virtual table using decimal type | create virtual table without using decimal type |
| 0x80006205 | Virtual table not support in STMT query and STMT insert | Use virtual table in stmt query and stmt insert | do not use virtual table in stmt query and insert |
| 0x80006206 | Virtual table not support in Topic | Use virtual table in topic | do not use virtual table in topic |
| 0x80006207 | Virtual super table query not support origin table from different databases | The origin tables of a virtual super table's child tables come from different databases | Make sure the origin tables of all child tables of a virtual super table come from the same database |

View File

@ -9,6 +9,7 @@ TARGETS = connect_example \
with_reqid_demo \ with_reqid_demo \
sml_insert_demo \ sml_insert_demo \
stmt_insert_demo \ stmt_insert_demo \
stmt2_insert_demo \
tmq_demo tmq_demo
SOURCES = connect_example.c \ SOURCES = connect_example.c \
@ -18,6 +19,7 @@ SOURCES = connect_example.c \
with_reqid_demo.c \ with_reqid_demo.c \
sml_insert_demo.c \ sml_insert_demo.c \
stmt_insert_demo.c \ stmt_insert_demo.c \
stmt2_insert_demo.c \
tmq_demo.c tmq_demo.c
LIBS = -ltaos -lpthread LIBS = -ltaos -lpthread

View File

@ -0,0 +1,204 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// TAOS standard API example. The same syntax as MySQL, but only a subset
// to compile: gcc -o stmt2_insert_demo stmt2_insert_demo.c -ltaos
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include "taos.h"
#define NUM_OF_SUB_TABLES 10
#define NUM_OF_ROWS 10
/**
* @brief Executes an SQL query and checks for errors.
*
* @param taos Pointer to TAOS connection.
* @param sql SQL query string.
*/
void executeSQL(TAOS *taos, const char *sql) {
TAOS_RES *res = taos_query(taos, sql);
int code = taos_errno(res);
if (code != 0) {
fprintf(stderr, "Error: %s\n", taos_errstr(res));
taos_free_result(res);
taos_close(taos);
exit(EXIT_FAILURE);
}
taos_free_result(res);
}
/**
* @brief Checks return status and exits if an error occurs.
*
* @param stmt2 Pointer to TAOS_STMT2.
* @param code Error code.
* @param msg Error message prefix.
*/
void checkErrorCode(TAOS_STMT2 *stmt2, int code, const char *msg) {
if (code != 0) {
fprintf(stderr, "%s. Code: %d, Error: %s\n", msg, code, taos_stmt2_error(stmt2));
taos_stmt2_close(stmt2);
exit(EXIT_FAILURE);
}
}
/**
* @brief Prepares data bindings for batch insertion.
*
* @param table_name Pointer to store allocated table names.
* @param tags Pointer to store allocated tag bindings.
* @param params Pointer to store allocated parameter bindings.
*/
void prepareBindData(char ***table_name, TAOS_STMT2_BIND ***tags, TAOS_STMT2_BIND ***params) {
*table_name = (char **)malloc(NUM_OF_SUB_TABLES * sizeof(char *));
*tags = (TAOS_STMT2_BIND **)malloc(NUM_OF_SUB_TABLES * sizeof(TAOS_STMT2_BIND *));
*params = (TAOS_STMT2_BIND **)malloc(NUM_OF_SUB_TABLES * sizeof(TAOS_STMT2_BIND *));
for (int i = 0; i < NUM_OF_SUB_TABLES; i++) {
// Allocate and assign table name
(*table_name)[i] = (char *)malloc(20 * sizeof(char));
sprintf((*table_name)[i], "d_bind_%d", i);
// Allocate memory for tags data
int *gid = (int *)malloc(sizeof(int));
int *gid_len = (int *)malloc(sizeof(int));
*gid = i;
*gid_len = sizeof(int);
char *location = (char *)malloc(20 * sizeof(char));
int *location_len = (int *)malloc(sizeof(int));
*location_len = sprintf(location, "location_%d", i);
(*tags)[i] = (TAOS_STMT2_BIND *)malloc(2 * sizeof(TAOS_STMT2_BIND));
(*tags)[i][0] = (TAOS_STMT2_BIND){TSDB_DATA_TYPE_INT, gid, gid_len, NULL, 1};
(*tags)[i][1] = (TAOS_STMT2_BIND){TSDB_DATA_TYPE_BINARY, location, location_len, NULL, 1};
// Allocate memory for columns data
(*params)[i] = (TAOS_STMT2_BIND *)malloc(4 * sizeof(TAOS_STMT2_BIND));
int64_t *ts = (int64_t *)malloc(NUM_OF_ROWS * sizeof(int64_t));
float *current = (float *)malloc(NUM_OF_ROWS * sizeof(float));
int *voltage = (int *)malloc(NUM_OF_ROWS * sizeof(int));
float *phase = (float *)malloc(NUM_OF_ROWS * sizeof(float));
int32_t *ts_len = (int32_t *)malloc(NUM_OF_ROWS * sizeof(int32_t));
int32_t *current_len = (int32_t *)malloc(NUM_OF_ROWS * sizeof(int32_t));
int32_t *voltage_len = (int32_t *)malloc(NUM_OF_ROWS * sizeof(int32_t));
int32_t *phase_len = (int32_t *)malloc(NUM_OF_ROWS * sizeof(int32_t));
(*params)[i][0] = (TAOS_STMT2_BIND){TSDB_DATA_TYPE_TIMESTAMP, ts, ts_len, NULL, NUM_OF_ROWS};
(*params)[i][1] = (TAOS_STMT2_BIND){TSDB_DATA_TYPE_FLOAT, current, current_len, NULL, NUM_OF_ROWS};
(*params)[i][2] = (TAOS_STMT2_BIND){TSDB_DATA_TYPE_INT, voltage, voltage_len, NULL, NUM_OF_ROWS};
(*params)[i][3] = (TAOS_STMT2_BIND){TSDB_DATA_TYPE_FLOAT, phase, phase_len, NULL, NUM_OF_ROWS};
for (int j = 0; j < NUM_OF_ROWS; j++) {
struct timeval tv;
gettimeofday(&tv, NULL);
ts[j] = tv.tv_sec * 1000LL + tv.tv_usec / 1000 + j;
current[j] = (float)rand() / RAND_MAX * 30;
voltage[j] = rand() % 300;
phase[j] = (float)rand() / RAND_MAX;
ts_len[j] = sizeof(int64_t);
current_len[j] = sizeof(float);
voltage_len[j] = sizeof(int);
phase_len[j] = sizeof(float);
}
}
}
/**
* @brief Frees allocated memory for binding data.
*
* @param table_name Pointer to allocated table names.
* @param tags Pointer to allocated tag bindings.
* @param params Pointer to allocated parameter bindings.
*/
void freeBindData(char ***table_name, TAOS_STMT2_BIND ***tags, TAOS_STMT2_BIND ***params) {
for (int i = 0; i < NUM_OF_SUB_TABLES; i++) {
free((*table_name)[i]);
for (int j = 0; j < 2; j++) {
free((*tags)[i][j].buffer);
free((*tags)[i][j].length);
}
free((*tags)[i]);
for (int j = 0; j < 4; j++) {
free((*params)[i][j].buffer);
free((*params)[i][j].length);
}
free((*params)[i]);
}
free(*table_name);
free(*tags);
free(*params);
}
/**
* @brief Inserts data using the TAOS stmt2 API.
*
* @param taos Pointer to TAOS connection.
*/
void insertData(TAOS *taos) {
TAOS_STMT2_OPTION option = {0, false, false, NULL, NULL};
TAOS_STMT2 *stmt2 = taos_stmt2_init(taos, &option);
if (!stmt2) {
fprintf(stderr, "Failed to initialize TAOS statement.\n");
exit(EXIT_FAILURE);
}
// stmt2 prepare sql
checkErrorCode(stmt2, taos_stmt2_prepare(stmt2, "INSERT INTO ? USING meters TAGS(?,?) VALUES (?,?,?,?)", 0),
"Statement preparation failed");
char **table_name;
TAOS_STMT2_BIND **tags, **params;
prepareBindData(&table_name, &tags, &params);
// stmt2 bind batch
TAOS_STMT2_BINDV bindv = {NUM_OF_SUB_TABLES, table_name, tags, params};
checkErrorCode(stmt2, taos_stmt2_bind_param(stmt2, &bindv, -1), "Parameter binding failed");
// stmt2 exec batch
int affected;
checkErrorCode(stmt2, taos_stmt2_exec(stmt2, &affected), "Execution failed");
printf("Successfully inserted %d rows.\n", affected);
// free and close
freeBindData(&table_name, &tags, &params);
taos_stmt2_close(stmt2);
}
int main() {
const char *host = "localhost";
const char *user = "root";
const char *password = "taosdata";
uint16_t port = 6030;
TAOS *taos = taos_connect(host, user, password, NULL, port);
if (taos == NULL) {
fprintf(stderr, "Failed to connect to %s:%hu, ErrCode: 0x%x, ErrMessage: %s.\n", host, port, taos_errno(NULL),
taos_errstr(NULL));
taos_cleanup();
exit(EXIT_FAILURE);
}
// create database and table
executeSQL(taos, "CREATE DATABASE IF NOT EXISTS power");
executeSQL(taos, "USE power");
executeSQL(taos,
"CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS "
"(groupId INT, location BINARY(24))");
insertData(taos);
taos_close(taos);
taos_cleanup();
}

View File

@ -4,7 +4,7 @@
"main": "index.js", "main": "index.js",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@tdengine/websocket": "^3.1.2" "@tdengine/websocket": "^3.1.5"
}, },
"scripts": { "scripts": {
"test": "echo \"Error: no test specified\" && exit 1" "test": "echo \"Error: no test specified\" && exit 1"

View File

@ -1,4 +1,3 @@
const { sleep } = require("@tdengine/websocket");
const taos = require("@tdengine/websocket"); const taos = require("@tdengine/websocket");
// ANCHOR: create_consumer // ANCHOR: create_consumer
@ -52,6 +51,12 @@ async function prepare() {
await wsSql.close(); await wsSql.close();
} }
const delay = function(ms) {
return new Promise(function(resolve) {
setTimeout(resolve, ms);
});
};
async function insert() { async function insert() {
let conf = new taos.WSConfig('ws://localhost:6041'); let conf = new taos.WSConfig('ws://localhost:6041');
conf.setUser('root'); conf.setUser('root');
@ -60,7 +65,7 @@ async function insert() {
let wsSql = await taos.sqlConnect(conf); let wsSql = await taos.sqlConnect(conf);
for (let i = 0; i < 50; i++) { for (let i = 0; i < 50; i++) {
await wsSql.exec(`INSERT INTO d1001 USING ${stable} (location, groupId) TAGS ("California.SanFrancisco", 3) VALUES (NOW, ${10 + i}, ${200 + i}, ${0.32 + i})`); await wsSql.exec(`INSERT INTO d1001 USING ${stable} (location, groupId) TAGS ("California.SanFrancisco", 3) VALUES (NOW, ${10 + i}, ${200 + i}, ${0.32 + i})`);
await sleep(100); await delay(100);
} }
await wsSql.close(); await wsSql.close();
} }

View File

@ -1,4 +1,3 @@
const { sleep } = require("@tdengine/websocket");
const taos = require("@tdengine/websocket"); const taos = require("@tdengine/websocket");
const db = 'power'; const db = 'power';

View File

@ -0,0 +1,207 @@
# ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
# ┃ ██████ ██████ ██████ █ █ █ █ █ █▄ ▀███ █ ┃
# ┃ ▄▄▄▄▄█ █▄▄▄▄▄ ▄▄▄▄▄█ ▀▀▀▀▀█▀▀▀▀▀ █ ▀▀▀▀▀█ ████████▌▐███ ███▄ ▀█ █ ▀▀▀▀▀ ┃
# ┃ █▀▀▀▀▀ █▀▀▀▀▀ █▀██▀▀ ▄▄▄▄▄ █ ▄▄▄▄▄█ ▄▄▄▄▄█ ████████▌▐███ █████▄ █ ▄▄▄▄▄ ┃
# ┃ █ ██████ █ ▀█▄ █ ██████ █ ███▌▐███ ███████▄ █ ┃
# ┣━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┫
# ┃ Copyright (c) 2017, the Perspective Authors. ┃
# ┃ ╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌ ┃
# ┃ This file is part of the Perspective library, distributed under the terms ┃
# ┃ of the [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0). ┃
# ┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
import logging
import tornado.websocket
import tornado.web
import tornado.ioloop
from datetime import date, datetime
import perspective
import perspective.handlers.tornado
import json
import taosws
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger('main')
# =============================================================================
# TDengine connection parameters
# =============================================================================
TAOS_HOST = "localhost" # TDengine server host
TAOS_PORT = 6041 # TDengine server port
TAOS_USER = "root" # TDengine username
TAOS_PASSWORD = "taosdata" # TDengine password
TAOS_DATABASE = "power" # TDengine database name
TAOS_TABLENAME = "meters" # TDengine table name
# =============================================================================
# Perspective server parameters
# =============================================================================
PERSPECTIVE_TABLE_NAME = "meters_values" # name of the Perspective table
PERSPECTIVE_REFRESH_RATE = 250 # refresh rate in milliseconds
class CustomJSONEncoder(json.JSONEncoder):
"""
Custom JSON encoder that serializes datetime and date objects
"""
def default(self, obj):
if isinstance(obj, datetime):
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super().default(obj)
json.JSONEncoder.default = CustomJSONEncoder().default
def convert_ts(ts) -> datetime:
"""
Convert a timestamp string to a datetime object
"""
for fmt in ('%Y-%m-%d %H:%M:%S.%f %z', '%Y-%m-%d %H:%M:%S %z'):
try:
return datetime.strptime(ts, fmt)
except ValueError:
continue
raise ValueError(f"Time data '{ts}' does not match any format")
def create_tdengine_connection(
host: str = TAOS_HOST,
port: int = TAOS_PORT,
user: str = TAOS_USER,
password: str = TAOS_PASSWORD,
) -> taosws.Connection:
try:
# connect to the tdengine server
conn = taosws.connect(
user=user,
password=password,
host=host,
port=port,
)
# switch to the right database
conn.execute(f"USE {TAOS_DATABASE}")
# connection successful
logger.info(f"Connected to tdengine successfully: {host}:{port}")
return conn
except Exception as err:
logger.error(f"Failed to connect to tdengine: {host}:{port} -- ErrMessage: {err}")
raise err
def read_tdengine(
conn: taosws.Connection,
) -> list[dict]:
try:
# query the database
sql = f"""
SELECT `ts`, location, groupid, current, voltage, phase
FROM {TAOS_TABLENAME}
WHERE `ts` >= NOW() - 12h
ORDER BY `ts` DESC
LIMIT 1000
"""
logger.debug(f"Executing query: {sql}")
res = conn.query(sql)
data = [
{
"timestamp": convert_ts(row[0]),
"location": row[1],
"groupid": row[2],
"current": row[3],
"voltage": row[4],
"phase": row[5],
}
for row in res
]
logger.info(f"select result: {data}")
return data
except Exception as err:
logger.error(f"Failed to query tdengine: {err}")
raise err
# ANCHOR: perspective_server
def perspective_thread(perspective_server: perspective.Server, tdengine_conn: taosws.Connection):
"""
Create a new Perspective table and update it with new data every 50ms
"""
# create a new Perspective table
client = perspective_server.new_local_client()
schema = {
"timestamp": datetime,
"location": str,
"groupid": int,
"current": float,
"voltage": int,
"phase": float,
}
# define the table schema
table = client.table(
schema,
limit=1000, # maximum number of rows in the table
name=PERSPECTIVE_TABLE_NAME, # table name. Use this with perspective-viewer on the client side
)
logger.info("Created new Perspective table")
# update with new data
def updater():
data = read_tdengine(tdengine_conn)
table.update(data)
logger.debug(f"Updated Perspective table: {len(data)} rows")
logger.info(f"Starting tornado ioloop update loop every {PERSPECTIVE_REFRESH_RATE} milliseconds")
# start the periodic callback to update the table data
callback = tornado.ioloop.PeriodicCallback(callback=updater, callback_time=PERSPECTIVE_REFRESH_RATE)
callback.start()
# ANCHOR_END: perspective_server
def make_app(perspective_server):
"""
Create a new Tornado application with a websocket handler that
serves a Perspective table. PerspectiveTornadoHandler handles
the websocket connection and streams the Perspective table changes
to the client.
"""
return tornado.web.Application([
(
r"/websocket", # websocket endpoint. Use this URL to configure the websocket client OR Prospective Server adapter
perspective.handlers.tornado.PerspectiveTornadoHandler, # PerspectiveTornadoHandler handles perspective table updates <-> websocket client
{"perspective_server": perspective_server}, # pass the perspective server to the handler
),
])
if __name__ == "__main__":
logger.info("TDEngine <-> Perspective Demo")
# create a new Perspective server
logger.info("Creating new Perspective server")
perspective_server = perspective.Server()
# create the tdengine connection
logger.info("Creating new TDEngine connection")
tdengine_conn = create_tdengine_connection()
# setup and start the Tornado app
logger.info("Creating Tornado server")
app = make_app(perspective_server)
app.listen(8085, address='0.0.0.0')
logger.info("Listening on http://localhost:8080")
try:
# start the io loop
logger.info("Starting ioloop to update Perspective table data via tornado websocket...")
loop = tornado.ioloop.IOLoop.current()
loop.call_later(0, perspective_thread, perspective_server, tdengine_conn)
loop.start()
except KeyboardInterrupt:
logger.warning("Keyboard interrupt detected. Shutting down tornado server...")
loop.stop()
loop.close()
logging.info("Shut down")

View File

@ -0,0 +1,135 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Perspective Viewer Dashboard</title>
<link rel="stylesheet" crossorigin="anonymous"
href="https://unpkg.com/@finos/perspective-viewer/dist/css/themes.css"/>
<style>
/* define the layout of the entire dashboard */
#dashboard {
display: grid;
/* define a grid layout with two rows and two columns */
grid-template-columns: 1fr 1fr;
grid-template-rows: auto auto auto;
gap: 20px;
padding: 20px;
/* limit the maximum height of the Dashboard to the viewport height */
max-height: 100vh;
box-sizing: border-box;
}
/* define the style */
.viewer-container {
/* adjust the height of the container to ensure it can be displayed on one screen */
height: calc((100vh - 30px) / 2);
width: 100%;
display: flex;
justify-content: center;
align-items: center;
background-color: #333;
border-radius: 8px;
overflow: hidden;
}
perspective-viewer {
height: 100%;
width: 100%;
}
body {
background-color: #242526;
color: white;
font-family: Arial, sans-serif;
margin: 0;
}
</style>
</head>
<body>
<!-- introduce JavaScript files related to Perspective Viewer -->
<script type="module" src="https://unpkg.com/@finos/perspective@3.1.3/dist/cdn/perspective.js"></script>
<script type="module" src="https://unpkg.com/@finos/perspective-viewer@3.1.3/dist/cdn/perspective-viewer.js"></script>
<script type="module"
src="https://unpkg.com/@finos/perspective-viewer-datagrid@3.1.3/dist/cdn/perspective-viewer-datagrid.js"></script>
<script type="module"
src="https://unpkg.com/@finos/perspective-viewer-d3fc@3.1.3/dist/cdn/perspective-viewer-d3fc.js"></script>
<!-- ANCHOR: perspective_viewer -->
<script type="module">
// import the Perspective library
import perspective from "https://unpkg.com/@finos/perspective@3.1.3/dist/cdn/perspective.js";
document.addEventListener("DOMContentLoaded", async function () {
// an asynchronous function for loading the view
async function load_viewer(viewerId, config) {
try {
const table_name = "meters_values";
const viewer = document.getElementById(viewerId);
// connect WebSocket server
const websocket = await perspective.websocket("ws://localhost:8085/websocket");
// open server table
const server_table = await websocket.open_table(table_name);
// load the table into the view
await viewer.load(server_table);
// use view configuration
await viewer.restore(config);
} catch (error) {
console.error('Error occurred:', error);
}
}
// configuration of the view
const config1 = {
"version": "3.3.1", // Perspective library version (compatibility identifier)
"plugin": "Datagrid", // View mode: Datagrid (table) or D3FC (chart)
"plugin_config": { // Plugin-specific configuration
"columns": {
"current": {
"width": 150 // Column width in pixels
}
},
"edit_mode": "READ_ONLY", // Edit mode: READ_ONLY (immutable) or EDIT (editable)
"scroll_lock": false // Whether to lock scroll position
},
"columns_config": {}, // Custom column configurations (colors, formatting, etc.)
"settings": true, // Whether to show settings panel (true/false)
"theme": "Power Meters", // Custom theme name (must be pre-defined)
"title": "Meters list data", // View title
"group_by": ["location", "groupid"], // Row grouping fields (equivalent to `row_pivots`)
"split_by": [], // Column grouping fields (equivalent to `column_pivots`)
"columns": [ // Columns to display (in order)
"timestamp",
"location",
"current",
"voltage",
"phase"
],
"filter": [], // Filter conditions (triplet format array)
"sort": [], // Sorting rules (format: [field, direction])
"expressions": {}, // Custom expressions (e.g., calculated columns)
"aggregates": { // Aggregation function configuration
"timestamp": "last", // Aggregation: last (takes the latest value)
"voltage": "last", // Aggregation: last
"phase": "last", // Aggregation: last
"current": "last" // Aggregation: last
}
};
// load the first view
await load_viewer("prsp-viewer-1", config1);
});
</script>
<!-- define the HTML Structure of the Dashboard -->
<div id="dashboard">
<div class="viewer-container">
<perspective-viewer id="prsp-viewer-1" theme="Pro Dark"></perspective-viewer>
</div>
</div>
<!-- ANCHOR_END: perspective_viewer -->
</body>
</html>

View File

@ -182,7 +182,7 @@ INTERVAL(interval_val [, interval_offset])
``` ```
时间窗口子句包括 3 个子句: 时间窗口子句包括 3 个子句:
- INTERVAL 子句用于产生相等时间周期的窗口interval_val 指定每个时间窗口的大小interval_offset 指定窗口偏移量; - INTERVAL 子句用于产生相等时间周期的窗口interval_val 指定每个时间窗口的大小interval_offset 指定窗口偏移量;默认情况下,窗口是从 Unix time 01970-01-01 00:00:00 UTC开始划分的如果设置了 interval_offset那么窗口的划分将从 “Unix time 0 + interval_offset” 开始;
- SLIDING 子句:用于指定窗口向前滑动的时间; - SLIDING 子句:用于指定窗口向前滑动的时间;
- FILL用于指定窗口区间数据缺失的情况下数据的填充模式。 - FILL用于指定窗口区间数据缺失的情况下数据的填充模式。

View File

@ -141,9 +141,20 @@ stmt 绑定参数的示例代码如下:
``` ```
</TabItem> </TabItem>
<TabItem label="C" value="c"> <TabItem label="C" value="c">
stmt2 绑定参数的示例代码如下(需要 TDengine v3.3.5.0 及以上):
```c
{{#include docs/examples/c/stmt2_insert_demo.c}}
```
stmt 绑定参数的示例代码如下:
```c ```c
{{#include docs/examples/c/stmt_insert_demo.c}} {{#include docs/examples/c/stmt_insert_demo.c}}
``` ```
</TabItem> </TabItem>
<TabItem label="REST API" value="rest"> <TabItem label="REST API" value="rest">
不支持 不支持

View File

@ -56,7 +56,7 @@ dataDir /mnt/data6 2 0
一般情况下,当 TDengine 要从同级挂载点中选择一个用于生成新的数据文件时,采用 round robin 策略进行选择。但现实中有可能每个磁盘的容量不相同,或者容量相同但写入的数据量不相同,这就导致会出现每个磁盘上的可用空间不均衡,在实际进行选择时有可能会选择到一个剩余空间已经很小的磁盘。 一般情况下,当 TDengine 要从同级挂载点中选择一个用于生成新的数据文件时,采用 round robin 策略进行选择。但现实中有可能每个磁盘的容量不相同,或者容量相同但写入的数据量不相同,这就导致会出现每个磁盘上的可用空间不均衡,在实际进行选择时有可能会选择到一个剩余空间已经很小的磁盘。
为了解决这个问题,从 3.1.1.0 开始引入了一个新的配置 minDiskFreeSize当某块磁盘上的可用空间小于等于这个阈值时该磁盘将不再被选择用于生成新的数据文件。该配置项的单位为字节其值应该大于 2GB会跳过可用空间小于 2GB 的挂载点。 为了解决这个问题,从 3.1.1.0 开始引入了一个新的配置 minDiskFreeSize当某块磁盘上的可用空间小于等于这个阈值时该磁盘将不再被选择用于生成新的数据文件。该配置项的单位为字节若配置值大于 2GB会跳过可用空间小于 2GB 的挂载点。
从 3.3.2.0 版本开始,引入了一个新的配置 disable_create_new_file用于控制在某个挂载点上禁止生成新文件其缺省值为 false即每个挂载点上默认都可以生成新文件。 从 3.3.2.0 版本开始,引入了一个新的配置 disable_create_new_file用于控制在某个挂载点上禁止生成新文件其缺省值为 false即每个挂载点上默认都可以生成新文件。

View File

@ -0,0 +1,274 @@
---
sidebar_label: 安全配置
title: 安全配置
toc_max_heading_level: 4
---
## 背景
TDengine 的分布式、多组件特性导致 TDengine 的安全配置是生产系统中比较关注的问题。本文档旨在对 TDengine 各组件及在不同部署方式下的安全问题进行说明,并提供部署和配置建议,为用户的数据安全提供支持。
## 安全配置涉及组件
TDengine 包含多个组件,有:
- `taosd`:内核组件。
- `taosc`:客户端库。
- `taosAdapter`REST API 和 WebSocket 服务。
- `taosKeeper`:监控服务组件。
- `taosX`:数据管道和备份恢复组件。
- `taosxAgent`:外部数据源数据接入辅助组件。
- `taosExplorer`Web 可视化管理界面。
与 TDengine 部署和应用相关,还会存在以下组件:
- 通过各种连接器接入并使用 TDengine 数据库的应用。
- 外部数据源:指接入 TDengine 的其他数据源,如 MQTT、OPC、Kafka 等。
各组件关系如下:
![TDengine 产品生态拓扑架构](./tdengine-topology.png)
关于各组件的详细介绍,请参考 [组件介绍](./intro)。
## TDengine 安全设置
### `taosd`
taosd 集群间使用 TCP 连接基于自有协议进行数据交换,风险较低,但传输过程不是加密的,仍有一定安全风险。
启用压缩可能对 TCP 数据混淆有帮助。
- **compressMsgSize**:是否对 RPC 消息进行压缩,整数,可选:-1所有消息都不压缩0所有消息都压缩N (N>0):只有大于 N 个字节的消息才压缩。
为了保证数据库操作可追溯,建议启用审计功能。
- **audit**:审计功能开关,0 为关,1 为开,默认打开。
- **auditInterval**:上报间隔,单位为毫秒。默认 5000。
- **auditCreateTable**:是否针对创建子表开启审计功能,0 为关,1 为开,默认打开。
为保证数据文件安全,可启用数据库加密。
- **encryptAlgorithm**:数据加密算法。
- **encryptScope**:数据加密范围。
启用白名单可限制访问地址,进一步增强私密性。
- **enableWhiteList**:白名单功能开关,0 为关,1 为开;默认关闭。
### `taosc`
用户和其他组件与 `taosd` 之间使用原生客户端库taosc和自有协议进行连接数据安全风险较低但传输过程仍然不是加密的有一定安全风险。
### `taosAdapter`
taosadapter 与 taosd 之间使用原生客户端库taosc和自有协议进行连接同样支持 RPC 消息压缩,不会造成数据安全问题。
应用和其他组件通过各语言连接器与 taosadapter 进行连接。默认情况下,连接是基于 HTTP 1.1 且不加密的。要保证 taosadapter 与其他组件之间的数据传输安全,需要配置 SSL 加密连接。在 `/etc/taos/taosadapter.toml` 配置文件中修改如下配置:
```toml
[ssl]
enable = true
certFile = "/path/to/certificate-file"
keyFile = "/path/to/private-key"
```
在连接器中配置 HTTPS/SSL 访问方式,完成加密访问。
为进一步增强安全性,可启用白名单功能,在 `taosd` 中配置,对 taosAdapter 组件同样生效。
### `taosX`
`taosX` 对外包括 REST API 接口和 gRPC 接口,其中 gRPC 接口用于 taos-agent 连接。
- REST API 接口是基于 HTTP 1.1 且不加密的,有安全风险。
- gRPC 接口基于 HTTP 2 且不加密,有安全风险。
为了保证数据安全,建议 taosX API 接口仅限内部访问。在 `/etc/taos/taosx.toml` 配置文件中修改如下配置:
```toml
[serve]
listen = "127.0.0.1:6050"
grpc = "127.0.0.1:6055"
```
从 TDengine 3.3.6.0 开始taosX 支持 HTTPS 连接,在 `/etc/taos/taosx.toml` 文件中添加如下配置:
```toml
[serve]
ssl_cert = "/path/to/server.pem"
ssl_key = "/path/to/server.key"
ssl_ca = "/path/to/ca.pem"
```
并在 Explorer 中修改 API 地址为 HTTPS 连接:
```toml
# taosX API 本地连接
x_api = "https://127.0.0.1:6050"
# Public IP 或者域名地址
grpc = "https://public.domain.name:6055"
```
### `taosExplorer`
与 `taosAdapter` 组件相似,`taosExplorer` 组件提供 HTTP 服务对外访问。在 `/etc/taos/explorer.toml` 配置文件中修改如下配置:
```toml
[ssl]
# SSL certificate file
certificate = "/path/to/ca.file"
# SSL certificate private key
certificate_key = "/path/to/key.file"
```
之后,使用 HTTPS 进行 Explorer 访问,如 [https://192.168.12.34:6060](https://192.168.12.34:6060)。
### `taosxAgent`
taosX 启用 HTTPS 后Agent 组件与 taosX 之间使用 HTTP 2 加密连接,使用 Arrow-Flight RPC 进行数据交换,传输内容是二进制格式,且仅注册过的 Agent 连接有效,保障数据安全。
建议在不安全网络或公共网络环境下运行的 Agent 服务始终开启 HTTPS 连接。
### `taosKeeper`
taosKeeper 使用 WebSocket 连接与 taosAdapter 通信,将其他组件上报的监控信息写入 TDengine。
`taosKeeper` 当前版本存在安全风险:
- 监听地址不可限制在本机,默认监听所有地址的 6043 端口,存在网络攻击风险。使用 Docker 或 Kubernetes 部署且不暴露 taosKeeper 端口时,此风险可忽略。
- 配置文件中配置明文密码,需要降低配置文件可见性。在 `/etc/taos/taoskeeper.toml` 中存在:
```toml
[tdengine]
host = "localhost"
port = 6041
username = "root"
password = "taosdata"
usessl = false
```
## 安全增强
我们建议在局域网内部使用 TDengine。
如果必须在局域网外部提供访问,请考虑添加以下配置:
### 负载均衡
使用负载均衡对外提供 taosAdapter 服务。
以 Nginx 为例,配置多节点负载均衡:
```nginx
http {
server {
listen 6041;
location / {
proxy_pass http://websocket;
# Headers for websocket compatible
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
# Forwarded headers
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Port $server_port;
proxy_set_header X-Forwarded-Server $hostname;
proxy_set_header X-Real-IP $remote_addr;
}
}
upstream websocket {
server 192.168.11.61:6041;
server 192.168.11.62:6041;
server 192.168.11.63:6041;
}
}
```
如果 taosAdapter 组件未配置 SSL 安全连接,还需要配置 SSL 才能保证安全访问。SSL 可以配置在更上层的 API Gateway也可以配置在 Nginx 中;如果您对各组件之间的安全性有更强的要求,可以在所有组件中都配置 SSL。Nginx 配置如下:
```nginx
http {
server {
listen 443 ssl;
ssl_certificate /path/to/your/certificate.crt;
ssl_certificate_key /path/to/your/private.key;
}
}
```
### 安全网关
在现在互联网生产系统中,安全网关使用也很普遍。[traefik](https://traefik.io/) 是一个很好的开源选择,我们以 traefik 为例,解释在 API 网关中的安全配置。
Traefik 中通过 middleware 中间件提供多种安全配置,包括:
1. 认证Authentication:Traefik 提供 BasicAuth、DigestAuth、自定义认证中间件、OAuth 2.0 等多种认证方式。
2. IP 白名单IPWhitelist:限制允许访问的客户端 IP。
3. 频率限制RateLimit:控制发送到服务的请求数。
4. 自定义 Headers:通过自定义 Headers 添加 `allowedHosts` 等配置,提高安全性。
一个常见的中间件示例如下:
```yaml
labels:
- "traefik.enable=true"
- "traefik.http.routers.tdengine.rule=Host(`api.tdengine.example.com`)"
- "traefik.http.routers.tdengine.entrypoints=https"
- "traefik.http.routers.tdengine.tls.certresolver=default"
- "traefik.http.routers.tdengine.service=tdengine"
- "traefik.http.services.tdengine.loadbalancer.server.port=6041"
- "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
- "traefik.http.middlewares.check-header.headers.customrequestheaders.X-Secret-Header=SecretValue"
- "traefik.http.middlewares.check-header.headers.customresponseheaders.X-Header-Check=true"
- "traefik.http.middlewares.tdengine-ipwhitelist.ipwhitelist.sourcerange=127.0.0.1/32, 192.168.1.7"
- "traefik.http.routers.tdengine.middlewares=redirect-to-https,check-header,tdengine-ipwhitelist"
```
上面的示例完成以下配置:
- TLS 认证使用 `default` 配置,这个配置可在配置文件或 traefik 启动参数中配置,如下:
```yaml
traefik:
image: "traefik:v2.3.2"
hostname: "traefik"
networks:
- traefik
command:
- "--log.level=INFO"
- "--api.insecure=true"
- "--providers.docker=true"
- "--providers.docker.exposedbydefault=false"
- "--providers.docker.swarmmode=true"
- "--providers.docker.network=traefik"
- "--providers.docker.watch=true"
- "--entrypoints.http.address=:80"
- "--entrypoints.https.address=:443"
- "--certificatesresolvers.default.acme.dnschallenge=true"
- "--certificatesresolvers.default.acme.dnschallenge.provider=alidns"
- "--certificatesresolvers.default.acme.dnschallenge.resolvers=ns1.alidns.com"
- "--certificatesresolvers.default.acme.email=linhehuo@gmail.com"
- "--certificatesresolvers.default.acme.storage=/letsencrypt/acme.json"
```
上面的启动参数配置了 `default` TLS 证书解析器和自动 ACME 认证(自动证书申请和延期)。
- 中间件 `redirect-to-https`:配置从 HTTP 到 HTTPS 的转发,强制使用安全连接。
```yaml
- "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
```
- 中间件 `check-header`:配置自定义 Headers 检查。外部访问必须添加自定义 Header 并匹配 Header 值,避免非法访问。这在提供 API 访问时是一个非常简单有效的安全机制。
- 中间件 `tdengine-ipwhitelist`:配置 IP 白名单。仅允许指定 IP 访问,使用 CIDR 路由规则进行匹配,可以设置内网及外网 IP 地址。
## 总结
数据安全是 TDengine 产品的一项关键指标,这些措施旨在保护 TDengine 部署免受未经授权的访问和数据泄露,同时保持性能和功能。但 TDengine 自身的安全配置不是生产中的唯一保障,结合用户业务系统制定更加匹配客户需求的解决方案更加重要。

View File

@ -0,0 +1,86 @@
---
sidebar_label: Perspective
title: 与 Perspective 集成
toc_max_heading_level: 4
---
## 概述
Perspective 是一款开源且强大的数据可视化库,由 [Prospective.co](https://www.perspective.co/) 开发,运用 `WebAssembly` 和 `Web Workers` 技术,在 Web 应用中实现交互式实时数据分析,能在浏览器端提供高性能可视化能力。借助它,开发者可构建实时更新的仪表盘、图表等,用户能轻松与数据交互,按需求筛选、排序及挖掘数据。其灵活性高,适配多种数据格式与业务场景;速度快,处理大规模数据也能保障交互流畅;易用性佳,新手和专业开发者都能快速搭建可视化界面。
在数据连接方面Perspective 通过 TDengine 的 Python 连接器,完美支持 TDengine 数据源,可高效获取其中海量时序数据等各类数据,并提供展示复杂图表、深度统计分析和趋势预测等实时功能,助力用户洞察数据价值,为决策提供有力支持,是构建对实时数据可视化和分析要求高的应用的理想选择。
![perspective-architecture](./perspective/prsp_architecture.webp)
## 前置条件
在 Linux 系统中进行如下安装操作:
- TDengine 服务已部署并正常运行(企业及社区版均可)。
- taosAdapter 能够正常运行,详细参考 [taosAdapter 使用手册](../../../reference/components/taosadapter)。
- Python 3.10 及以上版本已安装(如未安装,可参考 [Python 安装](https://docs.python.org/))。
- 下载或克隆 [perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) 项目,进入项目根目录后运行 “install.sh” 脚本,以便在本地下载并安装 TDengine 客户端库以及相关的依赖项。
## 可视化数据
**第 1 步**,运行 [perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) 项目根目录中的 “run.sh” 脚本,以此启动 Perspective 服务。该服务会每隔 300 毫秒从 TDengine 数据库中获取一次数据,并将数据以流的形式传输至基于 Web 的 `Perspective Viewer` 。
```shell
sh run.sh
```
**第 2 步**,启动一个静态 Web 服务,随后在浏览器中访问 `prsp-viewer.html` 资源,便能展示可视化数据。
```shell
python -m http.server 8081
```
通过浏览器访问该 Web 页面后所呈现出的效果如下图所示:
![perspective-viewer](./perspective/prsp_view.webp)
## 使用说明
### 写入数据
[perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) 项目根目录中的 `producer.py` 脚本,借助 TDengine Python 连接器,可定期向 TDengine 数据库插入数据。此脚本会生成随机数据并将其插入数据库,以此模拟实时数据的写入过程。具体执行步骤如下:
1. 建立与 TDengine 的连接。
1. 创建 power 数据库和 meters 表。
1. 每隔 300 毫秒生成一次随机数据,并写入 TDengine 数据库中。
Python 连接器详细写入说明可参见 [Python 参数绑定](../../../reference/connector/python/#参数绑定)。
### 加载数据
[perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) 项目根目录中的 `perspective_server.py` 脚本会启动一个 Perspective 服务器,该服务器会从 TDengine 读取数据,并通过 Tornado WebSocket 将数据流式传输到一个 Perspective 表中。
1. 启动一个 Perspective 服务器
1. 建立与 TDengine 的连接。
1. 创建一个 Perspective 表(表结构需要与 TDengine 数据库中表的类型保持匹配)。
1. 调用 `Tornado.PeriodicCallback` 函数来启动定时任务,进而实现对 Perspective 表数据的更新,示例代码如下:
```python
{{#include docs/examples/perspective/perspective_server.py:perspective_server}}
```
### HTML 页面配置
[perspective-connect-demo](https://github.com/taosdata/perspective-connect-demo) 项目根目录中的 `prsp-viewer.html`文件将 `Perspective Viewer` 嵌入到 HTML 页面中。它通过 WebSocket 连接到 Perspective 服务器,并根据图表配置显示实时数据。
- 配置展示的图表以及数据分析的规则。
- 与 Perspective 服务器建立 Websocket 连接。
- 引入 Perspective 库,通过 WebSocket 连接到 Perspective 服务器,加载 meters_values 表来展示动态数据。
```html
{{#include docs/examples/perspective/prsp-viewer.html:perspective_viewer}}
```
## 参考资料
- [Perspective 文档](https://perspective.finos.org/)
- [TDengine Python 连接器](../../../reference/connector/python)
- [TDengine 流计算](../../../advanced/stream/)

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 62 KiB

View File

@ -582,9 +582,9 @@ charset 的有效值是 UTF-8。
- 说明:当某块磁盘上的可用空间小于等于这个阈值时,该磁盘将不再被选择用于生成新的数据文件。 **`企业版参数`** - 说明:当某块磁盘上的可用空间小于等于这个阈值时,该磁盘将不再被选择用于生成新的数据文件。 **`企业版参数`**
- 类型:整数 - 类型:整数
- 单位byte - 单位byte
- 默认值52428800 - 默认值52428800 (50MB)
- 最小值52428800 - 最小值52428800 (50MB)
- 最大值:1073741824 - 最大值:2199023255552 (2TB)
- 动态修改:支持通过 SQL 修改,立即生效。 - 动态修改:支持通过 SQL 修改,立即生效。
- 支持版本:从 v3.1.0.0 版本开始引入 - 支持版本:从 v3.1.0.0 版本开始引入

View File

@ -290,6 +290,8 @@ taosBenchmark -f <json file>
其它通用参数详见 [通用配置参数](#通用配置参数)。 其它通用参数详见 [通用配置参数](#通用配置参数)。
**说明:从 v3.3.5.6 及以上版本不再支持 json 文件中同时配置 `specified_table_query``super_table_query`**
#### 执行指定查询语句 #### 执行指定查询语句
查询指定表(可以指定超级表、子表或普通表)的配置参数在 `specified_table_query` 中设置。 查询指定表(可以指定超级表、子表或普通表)的配置参数在 `specified_table_query` 中设置。
@ -416,6 +418,15 @@ taosBenchmark -f <json file>
</details> </details>
<details>
<summary>queryStb.json</summary>
```json
{{#include /TDengine/tools/taos-tools/example/queryStb.json}}
```
</details>
### 订阅 JSON 示例 ### 订阅 JSON 示例
<details> <details>

View File

@ -44,6 +44,7 @@ CREATE DATABASE db_name PRECISION 'ns';
| 16 | VARCHAR | 自定义 | BINARY 类型的别名 | | 16 | VARCHAR | 自定义 | BINARY 类型的别名 |
| 17 | GEOMETRY | 自定义 | 几何类型3.1.0.0 版本开始支持 | 17 | GEOMETRY | 自定义 | 几何类型3.1.0.0 版本开始支持
| 18 | VARBINARY | 自定义 | 可变长的二进制数据, 3.1.1.0 版本开始支持| | 18 | VARBINARY | 自定义 | 可变长的二进制数据, 3.1.1.0 版本开始支持|
| 19 | DECIMAL | 8 或 16 | 高精度数值类型,取值范围取决于类型中指定的 precision 和 scale自 v3.3.6 开始支持,见下文描述 |
:::note :::note
@ -63,6 +64,18 @@ CREATE DATABASE db_name PRECISION 'ns';
::: :::
### DECIMAL数据类型
`DECIMAL` 数据类型用于高精度数值存储,自 v3.3.6 版本开始支持,定义语法为 DECIMAL(18, 2)、DECIMAL(38, 10),其中需要指定两个参数,分别为 `precision` 和 `scale`。`precision` 是指最大支持的有效数字个数,`scale` 是指最大支持的小数位数。如 DECIMAL(8, 4) 可表示的范围即 [-9999.9999, 9999.9999]。定义 DECIMAL 数据类型时,`precision` 的范围为 [1,38]`scale` 的范围为 [0,precision]`scale` 为 0 时仅表示整数。也可以不指定 `scale`,默认为 0如 DECIMAL(18) 与 DECIMAL(18,0) 相同。
当 `precision` 值不大于 18 时,内部使用 8 字节存储DECIMAL64当 `precision` 范围为 (18, 38] 时,使用 16 字节存储DECIMAL。SQL 中写入 DECIMAL 类型数据时,可直接使用数值写入,当写入值大于类型可表示的最大值时会报 DECIMAL_OVERFLOW 错误;当未大于类型可表示的最大值、但小数位数超过 `scale` 时,会自动四舍五入处理,如定义类型 DECIMAL(10, 2),写入 10.987则实际存储值为 10.99。
DECIMAL 类型仅支持普通列,暂不支持 tag 列。DECIMAL 类型只支持 SQL 写入,暂不支持 stmt 写入和 schemaless 写入。
整数类型和 DECIMAL 类型操作时,会将整数类型转换为 DECIMAL 类型再进行计算DECIMAL 类型与 DOUBLE/FLOAT/VARCHAR/NCHAR 等类型计算时,转换为 DOUBLE 类型进行计算。
查询 DECIMAL 类型表达式时,若计算的中间结果超出当前类型可表示的最大值,报 DECIMAL OVERFLOW 错误。
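以下给出一个示意性的 C 代码片段(假设本机已部署 TDengine、数据库 `test` 已存在,表名 `dec_demo` 仅为示例假设),用于演示上文所述的 DECIMAL 定义与四舍五入行为,仅供参考:
```c
#include <stdio.h>
#include "taos.h"

// 执行一条 SQL失败时打印错误信息示意用途
static void exec_sql(TAOS *conn, const char *sql) {
  TAOS_RES *res = taos_query(conn, sql);
  if (taos_errno(res) != 0) {
    fprintf(stderr, "exec failed: %s, sql: %s\n", taos_errstr(res), sql);
  }
  taos_free_result(res);
}

int main() {
  TAOS *conn = taos_connect("localhost", "root", "taosdata", "test", 6030);
  if (conn == NULL) {
    fprintf(stderr, "connect failed: %s\n", taos_errstr(NULL));
    return 1;
  }
  // DECIMAL(10, 2):最多 10 位有效数字,最多 2 位小数
  exec_sql(conn, "CREATE TABLE IF NOT EXISTS dec_demo (ts TIMESTAMP, val DECIMAL(10, 2))");
  // 写入 10.987,小数位数超过 scale按上文规则四舍五入为 10.99 存储
  exec_sql(conn, "INSERT INTO dec_demo VALUES (NOW, 10.987)");
  taos_close(conn);
  taos_cleanup();
  return 0;
}
```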
## 常量 ## 常量
TDengine 支持多个类型的常量,细节如下表: TDengine 支持多个类型的常量,细节如下表:

View File

@ -1137,6 +1137,7 @@ CAST(expr AS type_name)
- 字符串类型转换数值类型时可能出现的无效字符情况,例如 "a" 可能转为 0但不会报错。 - 字符串类型转换数值类型时可能出现的无效字符情况,例如 "a" 可能转为 0但不会报错。
- 转换到数值类型时,数值大于 type_name 可表示的范围时,则会溢出,但不会报错。 - 转换到数值类型时,数值大于 type_name 可表示的范围时,则会溢出,但不会报错。
- 转换到字符串类型时,如果转换后长度超过 type_name 中指定的长度,则会截断,但不会报错。 - 转换到字符串类型时,如果转换后长度超过 type_name 中指定的长度,则会截断,但不会报错。
- DECIMAL 类型不支持与 JSON、VARBINARY、GEOMETRY 类型的互转。
#### TO_CHAR #### TO_CHAR
@ -1618,12 +1619,14 @@ AVG(expr)
**功能说明**:统计指定字段的平均值。 **功能说明**:统计指定字段的平均值。
**返回数据类型**DOUBLE。 **返回数据类型**DOUBLE、DECIMAL。
**适用数据类型**:数值类型。 **适用数据类型**:数值类型。
**适用于**:表和超级表。 **适用于**:表和超级表。
**说明**:当输入类型为 DECIMAL 类型时,输出类型也为 DECIMAL 类型,输出的 precision 和 scale 大小符合数据类型章节中的描述规则,通过计算 SUM 类型和 UINT64 的除法得到结果类型,若 SUM 的结果导致 DECIMAL 类型溢出,则报 DECIMAL OVERFLOW 错误。
### COUNT ### COUNT
```sql ```sql
@ -1805,12 +1808,14 @@ SUM(expr)
**功能说明**:统计表/超级表中某列的和。 **功能说明**:统计表/超级表中某列的和。
**返回数据类型**DOUBLE、BIGINT。 **返回数据类型**DOUBLE、BIGINT、DECIMAL。
**适用数据类型**:数值类型。 **适用数据类型**:数值类型。
**适用于**:表和超级表。 **适用于**:表和超级表。
**说明**:输入类型为 DECIMAL 类型时,输出类型为 DECIMAL(38, scale)precision 为当前支持的最大值scale 为输入类型的 scale若 SUM 的结果溢出,报 DECIMAL OVERFLOW 错误。
### VAR_POP ### VAR_POP
```sql ```sql
@ -2174,6 +2179,7 @@ ignore_null_values: {
- INTERP 用于在指定时间断面获取指定列的记录值,使用时有专用语法(interp_clause),语法介绍[参考链接](../select/#interp) 。 - INTERP 用于在指定时间断面获取指定列的记录值,使用时有专用语法(interp_clause),语法介绍[参考链接](../select/#interp) 。
- 当指定时间断面不存在符合条件的行数据时INTERP 函数会根据 [FILL](../distinguished/#fill-子句) 参数的设定进行插值。 - 当指定时间断面不存在符合条件的行数据时INTERP 函数会根据 [FILL](../distinguished/#fill-子句) 参数的设定进行插值。
- INTERP 作用于超级表时,会将该超级表下的所有子表数据按照主键列排序后进行插值计算,也可以搭配 PARTITION BY tbname 使用,将结果强制规约到单个时间线。 - INTERP 作用于超级表时,会将该超级表下的所有子表数据按照主键列排序后进行插值计算,也可以搭配 PARTITION BY tbname 使用,将结果强制规约到单个时间线。
- INTERP 在 FILL PREV/NEXT/NEAR 时,行为与窗口查询有所区别:当截面存在数据时,不会进行 FILL即便当前值为 NULL。
- INTERP 可以与伪列 `_irowts` 一起使用,返回插值点所对应的时间戳(v3.0.2.0 以后支持)。 - INTERP 可以与伪列 `_irowts` 一起使用,返回插值点所对应的时间戳(v3.0.2.0 以后支持)。
- INTERP 可以与伪列 `_isfilled` 一起使用,显示返回结果是否为原始记录或插值算法产生的数据(v3.0.3.0 以后支持)。 - INTERP 可以与伪列 `_isfilled` 一起使用,显示返回结果是否为原始记录或插值算法产生的数据(v3.0.3.0 以后支持)。
- 只有在使用 FILL PREV/NEXT/NEAR 模式时才可以使用伪列 `_irowts_origin`, 用于返回 `interp` 函数所使用的原始数据的时间戳列。若范围内无值, 则返回 NULL。`_irowts_origin` 在 v3.3.4.9 以后支持。 - 只有在使用 FILL PREV/NEXT/NEAR 模式时才可以使用伪列 `_irowts_origin`, 用于返回 `interp` 函数所使用的原始数据的时间戳列。若范围内无值, 则返回 NULL。`_irowts_origin` 在 v3.3.4.9 以后支持。

View File

@ -77,10 +77,10 @@ FILL 语句指定某一窗口区间数据缺失的情况下的填充模式。填
1. 不进行填充NONE默认填充模式 1. 不进行填充NONE默认填充模式
2. VALUE 填充:固定值填充,此时需要指定填充的数值。例如 `FILL(VALUE, 1.23)`。这里需要注意,最终填充的值受由相应列的类型决定,如 `FILL(VALUE, 1.23)`,相应列为 INT 类型,则填充值为 1若查询列表中有多列需要 FILL则需要给每一个 FILL 列指定 VALUE`SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`注意SELECT 表达式中只有包含普通列时才需要指定 FILL VALUE`_wstart`、`_wstart+1a`、`now`、`1+1` 以及使用 `partition by` 时的 `partition key` (如 tbname)都不需要指定 VALUE`timediff(last(ts), _wstart)` 则需要指定 VALUE。 2. VALUE 填充:固定值填充,此时需要指定填充的数值。例如 `FILL(VALUE, 1.23)`。这里需要注意,最终填充的值受由相应列的类型决定,如 `FILL(VALUE, 1.23)`,相应列为 INT 类型,则填充值为 1若查询列表中有多列需要 FILL则需要给每一个 FILL 列指定 VALUE`SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`注意SELECT 表达式中只有包含普通列时才需要指定 FILL VALUE`_wstart`、`_wstart+1a`、`now`、`1+1` 以及使用 `partition by` 时的 `partition key` (如 tbname)都不需要指定 VALUE`timediff(last(ts), _wstart)` 则需要指定 VALUE。
3. PREV 填充:使用前一个非 NULL 值填充数据。例如 FILL(PREV)。 3. PREV 填充:使用前一个值填充数据。例如 FILL(PREV)。
4. NULL 填充:使用 NULL 填充数据。例如 FILL(NULL)。 4. NULL 填充:使用 NULL 填充数据。例如 FILL(NULL)。
5. LINEAR 填充:根据前后距离最近的非 NULL 值做线性插值填充。例如 FILL(LINEAR)。 5. LINEAR 填充:根据前后距离最近的非 NULL 值做线性插值填充。例如 FILL(LINEAR)。
6. NEXT 填充:使用下一个非 NULL 值填充数据。例如 FILL(NEXT)。 6. NEXT 填充:使用下一个值填充数据。例如 FILL(NEXT)。
以上填充模式中,除了 NONE 模式默认不填充值之外,其他模式在查询的整个时间范围内如果没有数据 FILL 子句将被忽略即不产生填充数据查询结果为空。这种行为在部分模式PREV、NEXT、LINEAR下具有合理性因为在这些模式下没有数据意味着无法产生填充数值。而对另外一些模式NULL、VALUE来说理论上是可以产生填充数值的至于需不需要输出填充数值取决于应用的需求。所以为了满足这类需要强制填充数据或 NULL 的应用的需求,同时不破坏现有填充模式的行为兼容性,从 v3.0.3.0 开始,增加了两种新的填充模式: 以上填充模式中,除了 NONE 模式默认不填充值之外,其他模式在查询的整个时间范围内如果没有数据 FILL 子句将被忽略即不产生填充数据查询结果为空。这种行为在部分模式PREV、NEXT、LINEAR下具有合理性因为在这些模式下没有数据意味着无法产生填充数值。而对另外一些模式NULL、VALUE来说理论上是可以产生填充数值的至于需不需要输出填充数值取决于应用的需求。所以为了满足这类需要强制填充数据或 NULL 的应用的需求,同时不破坏现有填充模式的行为兼容性,从 v3.0.3.0 开始,增加了两种新的填充模式:
@ -104,7 +104,7 @@ NULL、NULL_F、VALUE、 VALUE_F 这几种填充模式针对不同场景区别
时间窗口又可分为滑动时间窗口和翻转时间窗口。 时间窗口又可分为滑动时间窗口和翻转时间窗口。
INTERVAL 子句用于产生相等时间周期的窗口SLIDING 用以指定窗口向前滑动的时间。每次执行的查询是一个时间窗口时间窗口随着时间流动向前滑动。在定义连续查询的时候需要指定时间窗口time window 大小和每次前向增量时间forward sliding times。如图[t0s, t0e] [t1s , t1e][t2s, t2e] 是分别是执行三次连续查询的时间窗口范围,窗口的前向滑动的时间范围 sliding time 标识 。查询过滤、聚合等操作按照每个时间窗口为独立的单位执行。当 SLIDING 与 INTERVAL 相等的时候,滑动窗口即为翻转窗口。 INTERVAL 子句用于产生相等时间周期的窗口SLIDING 用以指定窗口向前滑动的时间。每次执行的查询是一个时间窗口时间窗口随着时间流动向前滑动。在定义连续查询的时候需要指定时间窗口time window 大小和每次前向增量时间forward sliding times。如图[t0s, t0e] [t1s , t1e][t2s, t2e] 是分别是执行三次连续查询的时间窗口范围,窗口的前向滑动的时间范围 sliding time 标识 。查询过滤、聚合等操作按照每个时间窗口为独立的单位执行。当 SLIDING 与 INTERVAL 相等的时候,滑动窗口即为翻转窗口。默认情况下,窗口是从 Unix time 01970-01-01 00:00:00 UTC开始划分的如果设置了 interval_offset那么窗口的划分将从 “Unix time 0 + interval_offset” 开始。
![TDengine Database 时间窗口示意图](./timewindow-1.webp) ![TDengine Database 时间窗口示意图](./timewindow-1.webp)

View File

@ -37,6 +37,7 @@ description: 可配置压缩算法
| float/double | disabled/delta-d | delta-d | lz4/zlib/zstd/xz/tsz | lz4 | medium | | float/double | disabled/delta-d | delta-d | lz4/zlib/zstd/xz/tsz | lz4 | medium |
| binary/nchar | disabled | disabled | lz4/zlib/zstd/xz | zstd | medium | | binary/nchar | disabled | disabled | lz4/zlib/zstd/xz | zstd | medium |
| bool | disabled/bit-packing | bit-packing | lz4/zlib/zstd/xz | zstd | medium | | bool | disabled/bit-packing | bit-packing | lz4/zlib/zstd/xz | zstd | medium |
| decimal | disabled | disabled | lz4/zlib/zstd/xz | zstd | medium |
## SQL 语法 ## SQL 语法

View File

@ -680,7 +680,7 @@ TDengine 客户端驱动的版本号与 TDengine 服务端的版本号是一一
- **接口说明**:清理运行环境,应用退出前应调用。 - **接口说明**:清理运行环境,应用退出前应调用。
- `int taos_options(TSDB_OPTION option, const void * arg, ...)` - `int taos_options(TSDB_OPTION option, const void * arg, ...)`
- **接口说明**:设置客户端选项,支持区域设置(`TSDB_OPTION_LOCALE`)、字符集设置(`TSDB_OPTION_CHARSET`)、时区设置(`TSDB_OPTION_TIMEZONE`)、配置文件路径设置(`TSDB_OPTION_CONFIGDIR`)。区域设置、字符集、时区默认为操作系统当前设置。 - **接口说明**:设置客户端选项,支持区域设置(`TSDB_OPTION_LOCALE`)、字符集设置(`TSDB_OPTION_CHARSET`)、时区设置(`TSDB_OPTION_TIMEZONE`)、配置文件路径设置(`TSDB_OPTION_CONFIGDIR`)、驱动类型设置(`TSDB_OPTION_DRIVER`)。区域设置、字符集、时区默认为操作系统当前设置。驱动类型可选内部原生接口(`native`)和 WebSocket 接口(`websocket`),默认为 `websocket`,使用示例见下方代码。
- **参数说明** - **参数说明**
- `option`[入参] 设置项类型。 - `option`[入参] 设置项类型。
- `arg`[入参] 设置项值。 - `arg`[入参] 设置项值。
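以下是一个示意性的 C 代码片段(`TSDB_OPTION_DRIVER` 的具体取值与生效时机以当前版本头文件和文档为准,此处仅演示调用方式),展示在建立连接前切换为原生接口:
```c
#include <stdio.h>
#include "taos.h"

int main() {
  // 需在 taos_connect 之前设置驱动类型,可选 "native" 或 "websocket"(默认)
  if (taos_options(TSDB_OPTION_DRIVER, "native") != 0) {
    fprintf(stderr, "failed to set driver type\n");
    return 1;
  }
  TAOS *conn = taos_connect("localhost", "root", "taosdata", NULL, 6030);
  if (conn == NULL) {
    fprintf(stderr, "connect failed: %s\n", taos_errstr(NULL));
    return 1;
  }
  taos_close(conn);
  taos_cleanup();
  return 0;
}
```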
@ -826,6 +826,12 @@ TDengine 客户端驱动的版本号与 TDengine 服务端的版本号是一一
- res[入参] 结果集。 - res[入参] 结果集。
- **返回值**:非 `NULL`:成功,返回一个指向 TAOS_FIELD 结构体的指针,每个元素代表一列的元数据。`NULL`:失败。 - **返回值**:非 `NULL`:成功,返回一个指向 TAOS_FIELD 结构体的指针,每个元素代表一列的元数据。`NULL`:失败。
- `TAOS_FIELD_E *taos_fetch_fields_e(TAOS_RES *res)`
- **接口说明**:获取查询结果集每列数据的属性(列的名称、列的数据类型、列的长度),与 `taos_num_fields()` 配合使用,可用来解析 `taos_fetch_row()` 返回的一个元组(一行)的数据。TAOS_FIELD_E 中除了 TAOS_FIELD 的基本信息外,还包括了类型的 `precision` 和 `scale` 信息,使用示例见下方代码。
- **参数说明**
- res[入参] 结果集。
- **返回值**:非 `NULL`:成功,返回一个指向 TAOS_FIELD_E 结构体的指针,每个元素代表一列的元数据。`NULL`:失败。
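以下是一个示意性的 C 代码片段(假设数据库 `test` 中已存在包含 DECIMAL 列的表 `dec_demo`TAOS_FIELD_E 的成员名以实际头文件定义为准,此处的 name、type、precision、scale 仅为示意),展示如何读取每列的精度信息:
```c
#include <stdio.h>
#include "taos.h"

int main() {
  TAOS *conn = taos_connect("localhost", "root", "taosdata", "test", 6030);
  if (conn == NULL) {
    fprintf(stderr, "connect failed: %s\n", taos_errstr(NULL));
    return 1;
  }
  TAOS_RES *res = taos_query(conn, "SELECT * FROM dec_demo");
  if (taos_errno(res) == 0) {
    int cols = taos_num_fields(res);
    TAOS_FIELD_E *fields = taos_fetch_fields_e(res);
    for (int i = 0; fields != NULL && i < cols; i++) {
      // 打印列名、类型以及 DECIMAL 列的 precision/scale
      printf("col %d: name=%s type=%d precision=%d scale=%d\n",
             i, fields[i].name, fields[i].type, (int)fields[i].precision, (int)fields[i].scale);
    }
  }
  taos_free_result(res);
  taos_close(conn);
  taos_cleanup();
  return 0;
}
```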
- `void taos_stop_query(TAOS_RES *res)` - `void taos_stop_query(TAOS_RES *res)`
- **接口说明**:停止当前查询的执行。 - **接口说明**:停止当前查询的执行。
- **参数说明** - **参数说明**

View File

@ -121,6 +121,7 @@ JDBC 连接器可能报错的错误码包括 4 种:
| 0x2378 | consumer create error | 创建数据订阅失败,请根据错误信息检查 taos log 进行问题定位。 | | 0x2378 | consumer create error | 创建数据订阅失败,请根据错误信息检查 taos log 进行问题定位。 |
| 0x2379 | seek offset must not be a negative number | seek 接口参数不能为负值,请使用正确的参数 | | 0x2379 | seek offset must not be a negative number | seek 接口参数不能为负值,请使用正确的参数 |
| 0x237a | vGroup not found in result set | VGroup 没有分配给当前 consumer由于 Rebalance 机制导致 Consumer 与 VGroup 不是绑定的关系 | | 0x237a | vGroup not found in result set | VGroup 没有分配给当前 consumer由于 Rebalance 机制导致 Consumer 与 VGroup 不是绑定的关系 |
| 0x2390 | background thread write error in Efficient Writing | 高效写入后台线程写入错误,可以停止写入,重建连接 |
- [TDengine Java Connector Error Code](https://github.com/taosdata/taos-connector-jdbc/blob/main/src/main/java/com/taosdata/jdbc/TSDBErrorNumbers.java) - [TDengine Java Connector Error Code](https://github.com/taosdata/taos-connector-jdbc/blob/main/src/main/java/com/taosdata/jdbc/TSDBErrorNumbers.java)
<!-- - [TDengine_ERROR_CODE](../error-code) --> <!-- - [TDengine_ERROR_CODE](../error-code) -->
@ -317,6 +318,14 @@ properties 中的配置参数如下:
- TSDBDriver.PROPERTY_KEY_APP_NAMEApp 名称,可用于 `show connections` 查询结果显示。仅在使用 WebSocket 连接时生效。默认值为 java。 - TSDBDriver.PROPERTY_KEY_APP_NAMEApp 名称,可用于 `show connections` 查询结果显示。仅在使用 WebSocket 连接时生效。默认值为 java。
- TSDBDriver.PROPERTY_KEY_APP_IPApp IP可用于 `show connections` 查询结果显示。仅在使用 WebSocket 连接时生效。默认值为空。 - TSDBDriver.PROPERTY_KEY_APP_IPApp IP可用于 `show connections` 查询结果显示。仅在使用 WebSocket 连接时生效。默认值为空。
- TSDBDriver.PROPERTY_KEY_ASYNC_WRITE高效写入模式目前仅支持 `stmt` 方式。仅在使用 WebSocket 连接时生效。默认值为空,即不启用高效写入模式。
- TSDBDriver.PROPERTY_KEY_BACKEND_WRITE_THREAD_NUM高效写入模式下后台写入线程数。仅在使用 WebSocket 连接时生效。默认值为 10。
- TSDBDriver.PROPERTY_KEY_BATCH_SIZE_BY_ROW高效写入模式下写入数据的批大小单位是行。仅在使用 WebSocket 连接时生效。默认值为 1000。
- TSDBDriver.PROPERTY_KEY_CACHE_SIZE_BY_ROW高效写入模式下缓存的大小单位是行。仅在使用 WebSocket 连接时生效。默认值为 10000。
- TSDBDriver.PROPERTY_KEY_COPY_DATA高效写入模式下是否拷贝应用通过 addBatch 传入的二进制类型数据。仅在使用 WebSocket 连接时生效。默认值为 false。
- TSDBDriver.PROPERTY_KEY_STRICT_CHECK高效写入模式下是否校验表名长度和变长数据类型长度。仅在使用 WebSocket 连接时生效。默认值为 false。
- TSDBDriver.PROPERTY_KEY_RETRY_TIMES高效写入模式下写入失败重试次数。仅在使用 WebSocket 连接时生效。默认值为 3。
此外对 JDBC 原生连接,通过指定 URL 和 Properties 还可以指定其他参数比如日志级别、SQL 长度等。 此外对 JDBC 原生连接,通过指定 URL 和 Properties 还可以指定其他参数比如日志级别、SQL 长度等。
**配置参数的优先级** **配置参数的优先级**

View File

@ -24,6 +24,7 @@ Node.js 连接器源码托管在 [GitHub](https://github.com/taosdata/taos-conne
| Node.js 连接器 版本 | 主要变化 | TDengine 版本 | | Node.js 连接器 版本 | 主要变化 | TDengine 版本 |
| ------------------| ----------------------| ----------------| | ------------------| ----------------------| ----------------|
| 3.1.5 | 密码支持特殊字符 | - |
| 3.1.4 | 修改 readme | - | | 3.1.4 | 修改 readme | - |
| 3.1.3 | 升级了 es5-ext 版本,解决低版本的漏洞 | - | | 3.1.3 | 升级了 es5-ext 版本,解决低版本的漏洞 | - |
| 3.1.2 | 对数据协议和解析进行了优化,性能得到大幅提升| - | | 3.1.2 | 对数据协议和解析进行了优化,性能得到大幅提升| - |

View File

@ -44,6 +44,8 @@ description: TDengine 服务端的错误码列表和详细说明
| 0x80000107 | Ref ID is removed | 引用的 ref 资源已经释放 | 保留现场和日志github 上报 issue | | 0x80000107 | Ref ID is removed | 引用的 ref 资源已经释放 | 保留现场和日志github 上报 issue |
| 0x80000108 | Invalid Ref ID | 无效 ref ID | 保留现场和日志github 上报 issue | | 0x80000108 | Invalid Ref ID | 无效 ref ID | 保留现场和日志github 上报 issue |
| 0x8000010A | Ref is not there | ref 信息不存在 | 保留现场和日志github 上报 issue | | 0x8000010A | Ref is not there | ref 信息不存在 | 保留现场和日志github 上报 issue |
| 0x8000010B | Driver was not loaded | 未在系统路径中找到 libtaosnative.so 或 libtaosws.so | 重新安装客户端驱动 |
| 0x8000010C | Function was not loaded from the driver | 在 libtaos.so 中定义的一些函数在 libtaosnative.so 或 libtaosws.so 中未实现 | 保留现场和日志github 上报 issue |
| 0x80000110 | Unexpected generic error | 系统内部错误 | 保留现场和日志github 上报 issue | | 0x80000110 | Unexpected generic error | 系统内部错误 | 保留现场和日志github 上报 issue |
| 0x80000111 | Action in progress | 操作进行中 | 1.等待操作完成 2.根据需要取消操作 3.当超出合理时间仍然未完成可保留现场和日志,或联系客户支持 | | 0x80000111 | Action in progress | 操作进行中 | 1.等待操作完成 2.根据需要取消操作 3.当超出合理时间仍然未完成可保留现场和日志,或联系客户支持 |
| 0x80000112 | Out of range | 配置参数超出允许值范围 | 更改参数 | | 0x80000112 | Out of range | 配置参数超出允许值范围 | 更改参数 |
@ -579,10 +581,13 @@ description: TDengine 服务端的错误码列表和详细说明
## virtual table ## virtual table
| 错误码 | 错误描述 | 可能的出错场景或者可能的原因 | 建议用户采取的措施 | | 错误码 | 错误描述 | 可能的出错场景或者可能的原因 | 建议用户采取的措施 |
|------------|---------------------------------------------------------|------------------------------------------------|----------------------------| |------------|---------------------------------------------------------|------------------------------------------------|-------------------------|
| 0x80006200 | Virtual table scan 算子内部错误 | virtual table scan 算子内部逻辑错误,一般不会出现 | 具体查看client端的错误日志提示 | | 0x80006200 | Virtual table scan 算子内部错误 | virtual table scan 算子内部逻辑错误,一般不会出现 | 具体查看client端的错误日志提示 |
| 0x80006201 | Virtual table scan invalid downstream operator type | 由于生成的执行计划不对,导致 virtual table scan 算子的下游算子类型不正确 | 保留 explain 执行计划,联系开发处理 | | 0x80006201 | Virtual table scan invalid downstream operator type | 由于生成的执行计划不对,导致 virtual table scan 算子的下游算子类型不正确 | 保留 explain 执行计划,联系开发处理 |
| 0x80006202 | Virtual table prim timestamp column should not has ref | 虚拟表的时间戳主键列不应该有数据源,如果有,后续查询虚拟表的时候就会出现该错误 | 检查错误日志,联系开发处理 | | 0x80006202 | Virtual table prim timestamp column should not has ref | 虚拟表的时间戳主键列不应该有数据源,如果有,后续查询虚拟表的时候就会出现该错误 | 检查错误日志,联系开发处理 |
| 0x80006203 | Create virtual child table must use virtual super table | 虚拟子表必须建在虚拟超级表下,否则就会出现该错误 | 创建虚拟子表的时候USING 虚拟超级表 | | 0x80006203 | Create virtual child table must use virtual super table | 虚拟子表必须建在虚拟超级表下,否则就会出现该错误 | 创建虚拟子表的时候USING 虚拟超级表 |
| 0x80006204 | Virtual table not support decimal type | 虚拟表不支持 decimal 类型 | 创建虚拟表时不使用 decimal 类型的列/tag | | 0x80006204 | Virtual table not support decimal type | 虚拟表不支持 decimal 类型 | 创建虚拟表时不使用 decimal 类型的列/tag |
| 0x80006205 | Virtual table not support in STMT query and STMT insert | 不支持在 stmt 写入和查询中使用虚拟表 | 不在 stmt 写入和查询中使用虚拟表 |
| 0x80006206 | Virtual table not support in Topic | 不支持在订阅中使用虚拟表 | 不在订阅中使用虚拟表 |
| 0x80006207 | Virtual super table query not support origin table from different databases | 虚拟超级表不支持子表的数据源来自不同的数据库 | 确保虚拟超级表的子表的数据源都来自同一个数据库 |

View File

@ -43,6 +43,8 @@ TDengine 在不同组件中均支持使用 IANA 时区(除 Windows taos.cfg
夏令时Daylight Saving TimeDST是一种通过将时间提前一小时以充分利用日光、节约能源的制度。通常在春季开始秋季结束。夏令时的具体开始和结束时间因地区而异。以下均以柏林时间为例对夏令时和夏令时的影响做说明。 夏令时Daylight Saving TimeDST是一种通过将时间提前一小时以充分利用日光、节约能源的制度。通常在春季开始秋季结束。夏令时的具体开始和结束时间因地区而异。以下均以柏林时间为例对夏令时和夏令时的影响做说明。
![DST Berlin](./02-dst/dst-berlin.png)
按照这个规则,可以看到: 按照这个规则,可以看到:
- 柏林当地时间 2024 年 03 月 31 日 02:00:00 到 03:00:00 (不含 03:00:00之间的时间不存在跳变 - 柏林当地时间 2024 年 03 月 31 日 02:00:00 到 03:00:00 (不含 03:00:00之间的时间不存在跳变
@ -92,7 +94,7 @@ select * from t1 where ts >= '2024-10-27T01:59:59.000Z';
我们使用下表来展示夏令时在写入和查询中的影响。 我们使用下表来展示夏令时在写入和查询中的影响。
![DST Berlin](./02-dst/dst-berlin.png) ![DST Table](./02-dst/dst-table.png)
### 表格说明 ### 表格说明

View File

@ -42,27 +42,27 @@ IF(TD_LINUX)
) )
target_link_libraries(tmq target_link_libraries(tmq
${TAOS_LIB} ${TAOS_NATIVE_LIB}
) )
target_link_libraries(stream_demo target_link_libraries(stream_demo
${TAOS_LIB} ${TAOS_NATIVE_LIB}
) )
target_link_libraries(schemaless target_link_libraries(schemaless
${TAOS_LIB} ${TAOS_NATIVE_LIB}
) )
target_link_libraries(prepare target_link_libraries(prepare
${TAOS_LIB} ${TAOS_NATIVE_LIB}
) )
target_link_libraries(demo target_link_libraries(demo
${TAOS_LIB} ${TAOS_NATIVE_LIB}
) )
target_link_libraries(asyncdemo target_link_libraries(asyncdemo
${TAOS_LIB} ${TAOS_NATIVE_LIB}
) )
SET_TARGET_PROPERTIES(tmq PROPERTIES OUTPUT_NAME tmq) SET_TARGET_PROPERTIES(tmq PROPERTIES OUTPUT_NAME tmq)

View File

@ -62,6 +62,7 @@ typedef enum {
TSDB_OPTION_CONFIGDIR, TSDB_OPTION_CONFIGDIR,
TSDB_OPTION_SHELL_ACTIVITY_TIMER, TSDB_OPTION_SHELL_ACTIVITY_TIMER,
TSDB_OPTION_USE_ADAPTER, TSDB_OPTION_USE_ADAPTER,
TSDB_OPTION_DRIVER,
TSDB_MAX_OPTIONS TSDB_MAX_OPTIONS
} TSDB_OPTION; } TSDB_OPTION;
@ -155,11 +156,14 @@ typedef enum {
TAOS_NOTIFY_USER_DROPPED = 2, TAOS_NOTIFY_USER_DROPPED = 2,
} TAOS_NOTIFY_TYPE; } TAOS_NOTIFY_TYPE;
/* -- implemented in the native interface, for internal component only, the API may change -- */
#define RET_MSG_LENGTH 1024 #define RET_MSG_LENGTH 1024
typedef struct setConfRet { typedef struct setConfRet {
SET_CONF_RET_CODE retCode; SET_CONF_RET_CODE retCode;
char retMsg[RET_MSG_LENGTH]; char retMsg[RET_MSG_LENGTH];
} setConfRet; } setConfRet;
DLL_EXPORT setConfRet taos_set_config(const char *config); // implemented in the native interface
/* -- end -- */
typedef struct TAOS_VGROUP_HASH_INFO { typedef struct TAOS_VGROUP_HASH_INFO {
int32_t vgId; int32_t vgId;
@ -182,11 +186,10 @@ typedef struct TAOS_STMT_OPTIONS {
bool singleTableBindOnce; bool singleTableBindOnce;
} TAOS_STMT_OPTIONS; } TAOS_STMT_OPTIONS;
DLL_EXPORT int taos_init(void);
DLL_EXPORT void taos_cleanup(void); DLL_EXPORT void taos_cleanup(void);
DLL_EXPORT int taos_options(TSDB_OPTION option, const void *arg, ...); DLL_EXPORT int taos_options(TSDB_OPTION option, const void *arg, ...);
DLL_EXPORT int taos_options_connection(TAOS *taos, TSDB_OPTION_CONNECTION option, const void *arg, ...); DLL_EXPORT int taos_options_connection(TAOS *taos, TSDB_OPTION_CONNECTION option, const void *arg, ...);
DLL_EXPORT setConfRet taos_set_config(const char *config);
DLL_EXPORT int taos_init(void);
DLL_EXPORT TAOS *taos_connect(const char *ip, const char *user, const char *pass, const char *db, uint16_t port); DLL_EXPORT TAOS *taos_connect(const char *ip, const char *user, const char *pass, const char *db, uint16_t port);
DLL_EXPORT TAOS *taos_connect_auth(const char *ip, const char *user, const char *auth, const char *db, uint16_t port); DLL_EXPORT TAOS *taos_connect_auth(const char *ip, const char *user, const char *auth, const char *db, uint16_t port);
DLL_EXPORT void taos_close(TAOS *taos); DLL_EXPORT void taos_close(TAOS *taos);
@ -220,6 +223,7 @@ DLL_EXPORT char *taos_stmt_errstr(TAOS_STMT *stmt);
DLL_EXPORT int taos_stmt_affected_rows(TAOS_STMT *stmt); DLL_EXPORT int taos_stmt_affected_rows(TAOS_STMT *stmt);
DLL_EXPORT int taos_stmt_affected_rows_once(TAOS_STMT *stmt); DLL_EXPORT int taos_stmt_affected_rows_once(TAOS_STMT *stmt);
/* -- implemented in the native interface, for internal component only, the API may change -- */
typedef void TAOS_STMT2; typedef void TAOS_STMT2;
typedef struct TAOS_STMT2_OPTION { typedef struct TAOS_STMT2_OPTION {
@ -257,6 +261,7 @@ DLL_EXPORT int taos_stmt2_get_fields(TAOS_STMT2 *stmt, int *count, TAOS_FIELD_AL
DLL_EXPORT void taos_stmt2_free_fields(TAOS_STMT2 *stmt, TAOS_FIELD_ALL *fields); DLL_EXPORT void taos_stmt2_free_fields(TAOS_STMT2 *stmt, TAOS_FIELD_ALL *fields);
DLL_EXPORT TAOS_RES *taos_stmt2_result(TAOS_STMT2 *stmt); DLL_EXPORT TAOS_RES *taos_stmt2_result(TAOS_STMT2 *stmt);
DLL_EXPORT char *taos_stmt2_error(TAOS_STMT2 *stmt); DLL_EXPORT char *taos_stmt2_error(TAOS_STMT2 *stmt);
/* -- end -- */
DLL_EXPORT TAOS_RES *taos_query(TAOS *taos, const char *sql); DLL_EXPORT TAOS_RES *taos_query(TAOS *taos, const char *sql);
DLL_EXPORT TAOS_RES *taos_query_with_reqid(TAOS *taos, const char *sql, int64_t reqId); DLL_EXPORT TAOS_RES *taos_query_with_reqid(TAOS *taos, const char *sql, int64_t reqId);
@ -313,9 +318,11 @@ DLL_EXPORT void taos_set_hb_quit(int8_t quitByKill);
DLL_EXPORT int taos_set_notify_cb(TAOS *taos, __taos_notify_fn_t fp, void *param, int type); DLL_EXPORT int taos_set_notify_cb(TAOS *taos, __taos_notify_fn_t fp, void *param, int type);
/* -- implemented in the native interface, for internal component only, the API may change -- */
typedef void (*__taos_async_whitelist_fn_t)(void *param, int code, TAOS *taos, int numOfWhiteLists, typedef void (*__taos_async_whitelist_fn_t)(void *param, int code, TAOS *taos, int numOfWhiteLists,
uint64_t *pWhiteLists); uint64_t *pWhiteLists);
DLL_EXPORT void taos_fetch_whitelist_a(TAOS *taos, __taos_async_whitelist_fn_t fp, void *param); DLL_EXPORT void taos_fetch_whitelist_a(TAOS *taos, __taos_async_whitelist_fn_t fp, void *param);
/* ---- end ---- */
typedef enum { typedef enum {
TAOS_CONN_MODE_BI = 0, TAOS_CONN_MODE_BI = 0,
@ -414,7 +421,7 @@ DLL_EXPORT int32_t tmq_get_vgroup_id(TAOS_RES *res);
DLL_EXPORT int64_t tmq_get_vgroup_offset(TAOS_RES *res); DLL_EXPORT int64_t tmq_get_vgroup_offset(TAOS_RES *res);
DLL_EXPORT const char *tmq_err2str(int32_t code); DLL_EXPORT const char *tmq_err2str(int32_t code);
/* ------------------------------ TAOSX INTERFACE -----------------------------------*/ /* -- implemented in the native interface, for internal component(TAOSX) only, the API may change -- */
typedef struct tmq_raw_data { typedef struct tmq_raw_data {
void *raw; void *raw;
uint32_t raw_len; uint32_t raw_len;
@ -435,8 +442,9 @@ DLL_EXPORT void tmq_free_raw(tmq_raw_data raw);
// Returning null means error. Returned result need to be freed by tmq_free_json_meta // Returning null means error. Returned result need to be freed by tmq_free_json_meta
DLL_EXPORT char *tmq_get_json_meta(TAOS_RES *res); DLL_EXPORT char *tmq_get_json_meta(TAOS_RES *res);
DLL_EXPORT void tmq_free_json_meta(char *jsonMeta); DLL_EXPORT void tmq_free_json_meta(char *jsonMeta);
/* ---------------------------- TAOSX END -------------------------------- */ /* ---- end ---- */
/* -- implemented in the native interface, for internal component only, the API may change -- */
typedef enum { typedef enum {
TSDB_SRV_STATUS_UNAVAILABLE = 0, TSDB_SRV_STATUS_UNAVAILABLE = 0,
TSDB_SRV_STATUS_NETWORK_OK = 1, TSDB_SRV_STATUS_NETWORK_OK = 1,
@ -446,7 +454,10 @@ typedef enum {
} TSDB_SERVER_STATUS; } TSDB_SERVER_STATUS;
DLL_EXPORT TSDB_SERVER_STATUS taos_check_server_status(const char *fqdn, int port, char *details, int maxlen); DLL_EXPORT TSDB_SERVER_STATUS taos_check_server_status(const char *fqdn, int port, char *details, int maxlen);
DLL_EXPORT void taos_write_crashinfo(int signum, void *sigInfo, void *context);
DLL_EXPORT char *getBuildInfo(); DLL_EXPORT char *getBuildInfo();
/* ---- end ---- */
#ifdef __cplusplus #ifdef __cplusplus
} }
#endif #endif

View File

@ -29,6 +29,7 @@ extern "C" {
#define ANALY_FORECAST_DEFAULT_CONF 95 #define ANALY_FORECAST_DEFAULT_CONF 95
#define ANALY_FORECAST_DEFAULT_WNCHECK 1 #define ANALY_FORECAST_DEFAULT_WNCHECK 1
#define ANALY_FORECAST_MAX_ROWS 40000 #define ANALY_FORECAST_MAX_ROWS 40000
#define ANALY_FORECAST_RES_MAX_ROWS 1024
#define ANALY_ANOMALY_WINDOW_MAX_ROWS 40000 #define ANALY_ANOMALY_WINDOW_MAX_ROWS 40000
#define ANALY_DEFAULT_TIMEOUT 60 #define ANALY_DEFAULT_TIMEOUT 60
#define ANALY_MAX_TIMEOUT 600 #define ANALY_MAX_TIMEOUT 600

View File

@ -1430,6 +1430,7 @@ typedef struct {
int64_t watermark1; int64_t watermark1;
int64_t watermark2; int64_t watermark2;
int32_t ttl; int32_t ttl;
int32_t keep;
SArray* pFuncs; SArray* pFuncs;
int32_t commentLen; int32_t commentLen;
char* pComment; char* pComment;

View File

@ -102,6 +102,8 @@ int32_t qSetStreamOpOpen(qTaskInfo_t tinfo);
int32_t qSetStreamNotifyInfo(qTaskInfo_t tinfo, int32_t eventTypes, const SSchemaWrapper* pSchemaWrapper, int32_t qSetStreamNotifyInfo(qTaskInfo_t tinfo, int32_t eventTypes, const SSchemaWrapper* pSchemaWrapper,
const char* stbFullName, bool newSubTableRule, STaskNotifyEventStat* pNotifyEventStat); const char* stbFullName, bool newSubTableRule, STaskNotifyEventStat* pNotifyEventStat);
void qSetStreamMergeInfo(qTaskInfo_t tinfo, SArray* pVTables);
/** /**
* Set multiple input data blocks for the stream scan. * Set multiple input data blocks for the stream scan.
* @param tinfo * @param tinfo

View File

@ -263,6 +263,7 @@ typedef struct SDynQueryCtrlStbJoin {
typedef struct SDynQueryCtrlVtbScan { typedef struct SDynQueryCtrlVtbScan {
bool scanAllCols; bool scanAllCols;
char dbName[TSDB_DB_NAME_LEN];
uint64_t suid; uint64_t suid;
SVgroupsInfo* pVgroupList; SVgroupsInfo* pVgroupList;
} SDynQueryCtrlVtbScan; } SDynQueryCtrlVtbScan;
@ -666,6 +667,7 @@ typedef struct SStbJoinDynCtrlBasic {
typedef struct SVtbScanDynCtrlBasic { typedef struct SVtbScanDynCtrlBasic {
bool scanAllCols; bool scanAllCols;
char dbName[TSDB_DB_NAME_LEN];
uint64_t suid; uint64_t suid;
int32_t accountId; int32_t accountId;
SEpSet mgmtEpSet; SEpSet mgmtEpSet;

View File

@ -54,6 +54,7 @@ extern "C" {
#include <sys/wait.h> #include <sys/wait.h>
#if defined(DARWIN) #if defined(DARWIN)
#include <pwd.h>
#else #else
#if !defined(TD_ASTRA) #if !defined(TD_ASTRA)
#include <argp.h> #include <argp.h>

View File

@ -112,7 +112,9 @@ bool taosDirEntryIsDir(TdDirEntryPtr pDirEntry);
char *taosGetDirEntryName(TdDirEntryPtr pDirEntry); char *taosGetDirEntryName(TdDirEntryPtr pDirEntry);
int32_t taosCloseDir(TdDirPtr *ppDir); int32_t taosCloseDir(TdDirPtr *ppDir);
int taosGetDirSize(const char *path, int64_t *size); int32_t taosAppPath(char *path, int32_t maxLen);
int32_t taosGetDirSize(const char *path, int64_t *size);
#ifdef __cplusplus #ifdef __cplusplus
} }
#endif #endif

View File

@ -46,10 +46,12 @@ int32_t taosEOFCmd(TdCmdPtr pCmd);
void taosCloseCmd(TdCmdPtr *ppCmd); void taosCloseCmd(TdCmdPtr *ppCmd);
void *taosLoadDll(const char *filename); void *taosLoadDll(const char *fileName);
void taosCloseDll(void *handle); void taosCloseDll(void *handle);
void *taosLoadDllFunc(void *handle, const char *funcName);
int32_t taosSetConsoleEcho(bool on); int32_t taosSetConsoleEcho(bool on);
int32_t taosSetTerminalMode(); int32_t taosSetTerminalMode();
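
Note: a minimal sketch combining taosLoadDll, the new taosLoadDllFunc, and taosCloseDll. The library name and the symbol being resolved are assumptions for illustration; the prototypes are repeated here instead of including the internal OS header.

#include <stdio.h>

void *taosLoadDll(const char *fileName);
void *taosLoadDllFunc(void *handle, const char *funcName);
void  taosCloseDll(void *handle);

typedef const char *(*client_info_fn_t)(void);

int printDriverVersion(void) {
  void *handle = taosLoadDll("libtaosnative.so");  /* assumed to be on the loader search path */
  if (handle == NULL) {
    return -1;  /* a caller could map this to TSDB_CODE_DLL_NOT_LOAD */
  }
  client_info_fn_t fp = (client_info_fn_t)taosLoadDllFunc(handle, "taos_get_client_info");
  if (fp == NULL) {
    taosCloseDll(handle);
    return -1;  /* comparable to TSDB_CODE_DLL_FUNC_NOT_LOAD */
  }
  printf("driver version: %s\n", fp());
  taosCloseDll(handle);
  return 0;
}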

View File

@ -117,6 +117,8 @@ int32_t taosGetErrSize();
#define TSDB_CODE_REF_INVALID_ID TAOS_DEF_ERROR_CODE(0, 0x0108) // internal #define TSDB_CODE_REF_INVALID_ID TAOS_DEF_ERROR_CODE(0, 0x0108) // internal
#define TSDB_CODE_REF_ALREADY_EXIST TAOS_DEF_ERROR_CODE(0, 0x0109) // internal #define TSDB_CODE_REF_ALREADY_EXIST TAOS_DEF_ERROR_CODE(0, 0x0109) // internal
#define TSDB_CODE_REF_NOT_EXIST TAOS_DEF_ERROR_CODE(0, 0x010A) // internal #define TSDB_CODE_REF_NOT_EXIST TAOS_DEF_ERROR_CODE(0, 0x010A) // internal
#define TSDB_CODE_DLL_NOT_LOAD TAOS_DEF_ERROR_CODE(0, 0x010B)
#define TSDB_CODE_DLL_FUNC_NOT_LOAD TAOS_DEF_ERROR_CODE(0, 0x010C)
#define TSDB_CODE_APP_ERROR TAOS_DEF_ERROR_CODE(0, 0x0110) // #define TSDB_CODE_APP_ERROR TAOS_DEF_ERROR_CODE(0, 0x0110) //
#define TSDB_CODE_ACTION_IN_PROGRESS TAOS_DEF_ERROR_CODE(0, 0x0111) // internal #define TSDB_CODE_ACTION_IN_PROGRESS TAOS_DEF_ERROR_CODE(0, 0x0111) // internal
@ -514,6 +516,7 @@ int32_t taosGetErrSize();
#define TSDB_CODE_ANA_ANODE_RETURN_ERROR TAOS_DEF_ERROR_CODE(0, 0x0445) #define TSDB_CODE_ANA_ANODE_RETURN_ERROR TAOS_DEF_ERROR_CODE(0, 0x0445)
#define TSDB_CODE_ANA_ANODE_TOO_MANY_ROWS TAOS_DEF_ERROR_CODE(0, 0x0446) #define TSDB_CODE_ANA_ANODE_TOO_MANY_ROWS TAOS_DEF_ERROR_CODE(0, 0x0446)
#define TSDB_CODE_ANA_WN_DATA TAOS_DEF_ERROR_CODE(0, 0x0447) #define TSDB_CODE_ANA_WN_DATA TAOS_DEF_ERROR_CODE(0, 0x0447)
#define TSDB_CODE_ANA_INTERNAL_ERROR TAOS_DEF_ERROR_CODE(0, 0x0448)
// mnode-sma // mnode-sma
#define TSDB_CODE_MND_SMA_ALREADY_EXIST TAOS_DEF_ERROR_CODE(0, 0x0480) #define TSDB_CODE_MND_SMA_ALREADY_EXIST TAOS_DEF_ERROR_CODE(0, 0x0480)
@ -1071,6 +1074,9 @@ int32_t taosGetErrSize();
#define TSDB_CODE_VTABLE_PRIMTS_HAS_REF TAOS_DEF_ERROR_CODE(0, 0x6202) #define TSDB_CODE_VTABLE_PRIMTS_HAS_REF TAOS_DEF_ERROR_CODE(0, 0x6202)
#define TSDB_CODE_VTABLE_NOT_VIRTUAL_SUPER_TABLE TAOS_DEF_ERROR_CODE(0, 0x6203) #define TSDB_CODE_VTABLE_NOT_VIRTUAL_SUPER_TABLE TAOS_DEF_ERROR_CODE(0, 0x6203)
#define TSDB_CODE_VTABLE_NOT_SUPPORT_DATA_TYPE TAOS_DEF_ERROR_CODE(0, 0x6204) #define TSDB_CODE_VTABLE_NOT_SUPPORT_DATA_TYPE TAOS_DEF_ERROR_CODE(0, 0x6204)
#define TSDB_CODE_VTABLE_NOT_SUPPORT_STMT TAOS_DEF_ERROR_CODE(0, 0x6205)
#define TSDB_CODE_VTABLE_NOT_SUPPORT_TOPIC TAOS_DEF_ERROR_CODE(0, 0x6206)
#define TSDB_CODE_VTABLE_NOT_SUPPORT_CROSS_DB TAOS_DEF_ERROR_CODE(0, 0x6207)
#ifdef __cplusplus #ifdef __cplusplus
} }
#endif #endif

View File

@ -606,7 +606,8 @@ typedef enum ELogicConditionType {
#define TFS_MAX_LEVEL (TFS_MAX_TIERS - 1) #define TFS_MAX_LEVEL (TFS_MAX_TIERS - 1)
#define TFS_PRIMARY_LEVEL 0 #define TFS_PRIMARY_LEVEL 0
#define TFS_PRIMARY_ID 0 #define TFS_PRIMARY_ID 0
#define TFS_MIN_DISK_FREE_SIZE 50 * 1024 * 1024 #define TFS_MIN_DISK_FREE_SIZE 50 * 1024 * 1024 // 50MB
#define TFS_MIN_DISK_FREE_SIZE_MAX (2ULL * 1024 * 1024 * 1024 * 1024) // 2TB
enum { TRANS_STAT_INIT = 0, TRANS_STAT_EXECUTING, TRANS_STAT_EXECUTED, TRANS_STAT_ROLLBACKING, TRANS_STAT_ROLLBACKED }; enum { TRANS_STAT_INIT = 0, TRANS_STAT_EXECUTING, TRANS_STAT_EXECUTED, TRANS_STAT_ROLLBACKING, TRANS_STAT_ROLLBACKED };
enum { TRANS_OPER_INIT = 0, TRANS_OPER_EXECUTE, TRANS_OPER_ROLLBACK }; enum { TRANS_OPER_INIT = 0, TRANS_OPER_EXECUTE, TRANS_OPER_ROLLBACK };
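
Note: an illustrative bounds check using the two limits above; the function name and the rejection policy are assumptions for the example.

#include <stdint.h>

#define TFS_MIN_DISK_FREE_SIZE     (50 * 1024 * 1024)                  /* 50MB, as defined above */
#define TFS_MIN_DISK_FREE_SIZE_MAX (2ULL * 1024 * 1024 * 1024 * 1024)  /* 2TB, as defined above */

/* Hypothetical config check: the reserved free-disk threshold must stay within [50MB, 2TB]. */
static int32_t checkMinDiskFreeSize(uint64_t cfgBytes) {
  if (cfgBytes < TFS_MIN_DISK_FREE_SIZE || cfgBytes > TFS_MIN_DISK_FREE_SIZE_MAX) {
    return -1;  /* out of range: reject the configuration value */
  }
  return 0;
}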

View File

@ -152,10 +152,14 @@ function check_lib_path() {
# check all links # check all links
check_link ${lib_link_dir}/libtaos.so check_link ${lib_link_dir}/libtaos.so
check_link ${lib_link_dir}/libtaos.so.1 check_link ${lib_link_dir}/libtaos.so.1
check_link ${lib_link_dir}/libtaosnative.so
check_link ${lib_link_dir}/libtaosnative.so.1
if [[ -d ${lib64_link_dir} ]]; then if [[ -d ${lib64_link_dir} ]]; then
check_link ${lib64_link_dir}/libtaos.so check_link ${lib64_link_dir}/libtaos.so
check_link ${lib64_link_dir}/libtaos.so.1 check_link ${lib64_link_dir}/libtaos.so.1
check_link ${lib64_link_dir}/libtaosnative.so
check_link ${lib64_link_dir}/libtaosnative.so.1
fi fi
echo -e "Check lib path:\033[32mOK\033[0m!" echo -e "Check lib path:\033[32mOK\033[0m!"
} }

View File

@ -80,4 +80,5 @@ fi
# there can not libtaos.so*, otherwise ln -s error # there can not libtaos.so*, otherwise ln -s error
${csudo}rm -f ${install_main_dir}/driver/libtaos.* || : ${csudo}rm -f ${install_main_dir}/driver/libtaos.* || :
${csudo}rm -f ${install_main_dir}/driver/libtaosnative.* || :
[ -f ${install_main_dir}/driver/libtaosws.so ] && ${csudo}rm -f ${install_main_dir}/driver/libtaosws.so || : [ -f ${install_main_dir}/driver/libtaosws.so ] && ${csudo}rm -f ${install_main_dir}/driver/libtaosws.so || :

View File

@ -44,6 +44,8 @@ else
${csudo}rm -f ${inc_link_dir}/taosws.h || : ${csudo}rm -f ${inc_link_dir}/taosws.h || :
${csudo}rm -f ${lib_link_dir}/libtaos.* || : ${csudo}rm -f ${lib_link_dir}/libtaos.* || :
${csudo}rm -f ${lib64_link_dir}/libtaos.* || : ${csudo}rm -f ${lib64_link_dir}/libtaos.* || :
${csudo}rm -f ${lib_link_dir}/libtaosnative.* || :
${csudo}rm -f ${lib64_link_dir}/libtaosnative.* || :
${csudo}rm -f ${lib_link_dir}/libtaosws.so || : ${csudo}rm -f ${lib_link_dir}/libtaosws.so || :
${csudo}rm -f ${lib64_link_dir}/libtaosws.so || : ${csudo}rm -f ${lib64_link_dir}/libtaosws.so || :

View File

@ -31,6 +31,7 @@ mkdir -p ${pkg_dir}
cd ${pkg_dir} cd ${pkg_dir}
libfile="libtaos.so.${tdengine_ver}" libfile="libtaos.so.${tdengine_ver}"
nativelibfile="libtaosnative.so.${tdengine_ver}"
wslibfile="libtaosws.so" wslibfile="libtaosws.so"
# create install dir # create install dir
@ -120,6 +121,7 @@ fi
cp ${compile_dir}/build/bin/taos ${pkg_dir}${install_home_path}/bin cp ${compile_dir}/build/bin/taos ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/lib/${libfile} ${pkg_dir}${install_home_path}/driver cp ${compile_dir}/build/lib/${libfile} ${pkg_dir}${install_home_path}/driver
cp ${compile_dir}/build/lib/${nativelibfile} ${pkg_dir}${install_home_path}/driver
[ -f ${compile_dir}/build/lib/${wslibfile} ] && cp ${compile_dir}/build/lib/${wslibfile} ${pkg_dir}${install_home_path}/driver ||: [ -f ${compile_dir}/build/lib/${wslibfile} ] && cp ${compile_dir}/build/lib/${wslibfile} ${pkg_dir}${install_home_path}/driver ||:
cp ${compile_dir}/../include/client/taos.h ${pkg_dir}${install_home_path}/include cp ${compile_dir}/../include/client/taos.h ${pkg_dir}${install_home_path}/include
cp ${compile_dir}/../include/common/taosdef.h ${pkg_dir}${install_home_path}/include cp ${compile_dir}/../include/common/taosdef.h ${pkg_dir}${install_home_path}/include

View File

@ -44,6 +44,7 @@ echo version: %{_version}
echo buildroot: %{buildroot} echo buildroot: %{buildroot}
libfile="libtaos.so.%{_version}" libfile="libtaos.so.%{_version}"
nativelibfile="libtaosnative.so.%{_version}"
wslibfile="libtaosws.so" wslibfile="libtaosws.so"
# create install path, and cp file # create install path, and cp file
@ -112,6 +113,7 @@ if [ -f %{_compiledir}/build/bin/taosadapter ]; then
cp %{_compiledir}/build/bin/taosadapter %{buildroot}%{homepath}/bin cp %{_compiledir}/build/bin/taosadapter %{buildroot}%{homepath}/bin
fi fi
cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver
cp %{_compiledir}/build/lib/${nativelibfile} %{buildroot}%{homepath}/driver
[ -f %{_compiledir}/build/lib/${wslibfile} ] && cp %{_compiledir}/build/lib/${wslibfile} %{buildroot}%{homepath}/driver ||: [ -f %{_compiledir}/build/lib/${wslibfile} ] && cp %{_compiledir}/build/lib/${wslibfile} %{buildroot}%{homepath}/driver ||:
cp %{_compiledir}/../include/client/taos.h %{buildroot}%{homepath}/include cp %{_compiledir}/../include/client/taos.h %{buildroot}%{homepath}/include
cp %{_compiledir}/../include/common/taosdef.h %{buildroot}%{homepath}/include cp %{_compiledir}/../include/common/taosdef.h %{buildroot}%{homepath}/include
@ -246,6 +248,8 @@ if [ $1 -eq 0 ];then
${csudo}rm -f ${inc_link_dir}/taosudf.h || : ${csudo}rm -f ${inc_link_dir}/taosudf.h || :
${csudo}rm -f ${inc_link_dir}/taows.h || : ${csudo}rm -f ${inc_link_dir}/taows.h || :
${csudo}rm -f ${lib_link_dir}/libtaos.so || : ${csudo}rm -f ${lib_link_dir}/libtaos.so || :
${csudo}rm -f ${lib_link_dir}/libtaosnative.so || :
${csudo}rm -f ${lib64_link_dir}/libtaosnative.so || :
${csudo}rm -f ${lib_link_dir}/libtaosws.so || : ${csudo}rm -f ${lib_link_dir}/libtaosws.so || :
${csudo}rm -f ${lib64_link_dir}/libtaosws.so || : ${csudo}rm -f ${lib64_link_dir}/libtaosws.so || :

View File

@ -271,17 +271,23 @@ function install_lib() {
# Remove links # Remove links
${csudo}rm -f ${lib_link_dir}/libtaos.* || : ${csudo}rm -f ${lib_link_dir}/libtaos.* || :
${csudo}rm -f ${lib64_link_dir}/libtaos.* || : ${csudo}rm -f ${lib64_link_dir}/libtaos.* || :
${csudo}rm -f ${lib_link_dir}/libtaosnative.* || :
${csudo}rm -f ${lib64_link_dir}/libtaosnative.* || :
#${csudo}rm -rf ${v15_java_app_dir} || : #${csudo}rm -rf ${v15_java_app_dir} || :
${csudo}cp -rf ${script_dir}/driver/* ${install_main_dir}/driver && ${csudo}chmod 777 ${install_main_dir}/driver/* ${csudo}cp -rf ${script_dir}/driver/* ${install_main_dir}/driver && ${csudo}chmod 777 ${install_main_dir}/driver/*
${csudo}ln -sf ${install_main_dir}/driver/libtaos.* ${lib_link_dir}/libtaos.so.1 ${csudo}ln -sf ${install_main_dir}/driver/libtaos.* ${lib_link_dir}/libtaos.so.1
${csudo}ln -sf ${lib_link_dir}/libtaos.so.1 ${lib_link_dir}/libtaos.so ${csudo}ln -sf ${lib_link_dir}/libtaos.so.1 ${lib_link_dir}/libtaos.so
${csudo}ln -sf ${install_main_dir}/driver/libtaosnative.* ${lib_link_dir}/libtaosnative.so.1
${csudo}ln -sf ${lib_link_dir}/libtaosnative.so.1 ${lib_link_dir}/libtaosnative.so
[ -f ${install_main_dir}/driver/libtaosws.so ] && ${csudo}ln -sf ${install_main_dir}/driver/libtaosws.so ${lib_link_dir}/libtaosws.so || : [ -f ${install_main_dir}/driver/libtaosws.so ] && ${csudo}ln -sf ${install_main_dir}/driver/libtaosws.so ${lib_link_dir}/libtaosws.so || :
if [[ -d ${lib64_link_dir} && ! -e ${lib64_link_dir}/libtaos.so ]]; then if [[ -d ${lib64_link_dir} && ! -e ${lib64_link_dir}/libtaos.so ]]; then
${csudo}ln -sf ${install_main_dir}/driver/libtaos.* ${lib64_link_dir}/libtaos.so.1 || : ${csudo}ln -sf ${install_main_dir}/driver/libtaos.* ${lib64_link_dir}/libtaos.so.1 || :
${csudo}ln -sf ${lib64_link_dir}/libtaos.so.1 ${lib64_link_dir}/libtaos.so || : ${csudo}ln -sf ${lib64_link_dir}/libtaos.so.1 ${lib64_link_dir}/libtaos.so || :
${csudo}ln -sf ${install_main_dir}/driver/libtaosnative.* ${lib64_link_dir}/libtaosnative.so.1 || :
${csudo}ln -sf ${lib64_link_dir}/libtaosnative.so.1 ${lib64_link_dir}/libtaosnative.so || :
[ -f ${install_main_dir}/libtaosws.so ] && ${csudo}ln -sf ${install_main_dir}/libtaosws.so ${lib64_link_dir}/libtaosws.so || : [ -f ${install_main_dir}/libtaosws.so ] && ${csudo}ln -sf ${install_main_dir}/libtaosws.so ${lib64_link_dir}/libtaosws.so || :
fi fi

View File

@ -134,6 +134,7 @@ function install_bin() {
function clean_lib() { function clean_lib() {
sudo rm -f /usr/lib/libtaos.* || : sudo rm -f /usr/lib/libtaos.* || :
sudo rm -f /usr/lib/libtaosnative.* || :
[ -f /usr/lib/libtaosws.so ] && sudo rm -f /usr/lib/libtaosws.so || : [ -f /usr/lib/libtaosws.so ] && sudo rm -f /usr/lib/libtaosws.so || :
[ -f /usr/lib64/libtaosws.so ] && sudo rm -f /usr/lib64/libtaosws.so || : [ -f /usr/lib64/libtaosws.so ] && sudo rm -f /usr/lib64/libtaosws.so || :
sudo rm -rf ${lib_dir} || : sudo rm -rf ${lib_dir} || :
@ -143,6 +144,8 @@ function install_lib() {
# Remove links # Remove links
${csudo}rm -f ${lib_link_dir}/libtaos.* || : ${csudo}rm -f ${lib_link_dir}/libtaos.* || :
${csudo}rm -f ${lib64_link_dir}/libtaos.* || : ${csudo}rm -f ${lib64_link_dir}/libtaos.* || :
${csudo}rm -f ${lib_link_dir}/libtaosnative.* || :
${csudo}rm -f ${lib64_link_dir}/libtaosnative.* || :
[ -f ${lib_link_dir}/libtaosws.so ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.so || : [ -f ${lib_link_dir}/libtaosws.so ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.so || :
[ -f ${lib64_link_dir}/libtaosws.so ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.so || : [ -f ${lib64_link_dir}/libtaosws.so ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.so || :
@ -154,18 +157,24 @@ function install_lib() {
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
${csudo}ln -s ${install_main_dir}/driver/libtaos.* ${lib_link_dir}/libtaos.so.1 ${csudo}ln -s ${install_main_dir}/driver/libtaos.* ${lib_link_dir}/libtaos.so.1
${csudo}ln -s ${lib_link_dir}/libtaos.so.1 ${lib_link_dir}/libtaos.so ${csudo}ln -s ${lib_link_dir}/libtaos.so.1 ${lib_link_dir}/libtaos.so
${csudo}ln -s ${install_main_dir}/driver/libtaosnative.* ${lib_link_dir}/libtaosnative.so.1
${csudo}ln -s ${lib_link_dir}/libtaosnative.so.1 ${lib_link_dir}/libtaosnative.so
[ -f ${install_main_dir}/driver/libtaosws.so ] && ${csudo}ln -sf ${install_main_dir}/driver/libtaosws.so ${lib_link_dir}/libtaosws.so ||: [ -f ${install_main_dir}/driver/libtaosws.so ] && ${csudo}ln -sf ${install_main_dir}/driver/libtaosws.so ${lib_link_dir}/libtaosws.so ||:
if [ -d "${lib64_link_dir}" ]; then if [ -d "${lib64_link_dir}" ]; then
${csudo}ln -s ${install_main_dir}/driver/libtaos.* ${lib64_link_dir}/libtaos.so.1 || : ${csudo}ln -s ${install_main_dir}/driver/libtaos.* ${lib64_link_dir}/libtaos.so.1 || :
${csudo}ln -s ${lib64_link_dir}/libtaos.so.1 ${lib64_link_dir}/libtaos.so || : ${csudo}ln -s ${lib64_link_dir}/libtaos.so.1 ${lib64_link_dir}/libtaos.so || :
${csudo}ln -s ${install_main_dir}/driver/libtaosnative.* ${lib64_link_dir}/libtaosnative.so.1 || :
${csudo}ln -s ${lib64_link_dir}/libtaosnative.so.1 ${lib64_link_dir}/libtaosnative.so || :
[ -f ${install_main_dir}/driver/libtaosws.so ] && ${csudo}ln -sf ${install_main_dir}/driver/libtaosws.so ${lib64_link_dir}/libtaosws.so || : [ -f ${install_main_dir}/driver/libtaosws.so ] && ${csudo}ln -sf ${install_main_dir}/driver/libtaosws.so ${lib64_link_dir}/libtaosws.so || :
fi fi
else else
${csudo}ln -s ${install_main_dir}/driver/libtaos.* ${lib_link_dir}/libtaos.1.dylib ${csudo}ln -s ${install_main_dir}/driver/libtaos.* ${lib_link_dir}/libtaos.1.dylib
${csudo}ln -s ${lib_link_dir}/libtaos.1.dylib ${lib_link_dir}/libtaos.dylib ${csudo}ln -s ${lib_link_dir}/libtaos.1.dylib ${lib_link_dir}/libtaos.dylib
${csudo}ln -s ${install_main_dir}/driver/libtaosnative.* ${lib_link_dir}/libtaosnative.1.dylib
${csudo}ln -s ${lib_link_dir}/libtaosnative.1.dylib ${lib_link_dir}/libtaosnative.dylib
[ -f ${install_main_dir}/driver/libtaosws.dylib ] && ${csudo}ln -sf ${install_main_dir}/driver/libtaosws.dylib ${lib_link_dir}/libtaosws.dylib ||: [ -f ${install_main_dir}/driver/libtaosws.dylib ] && ${csudo}ln -sf ${install_main_dir}/driver/libtaosws.dylib ${lib_link_dir}/libtaosws.dylib ||:
fi fi
@ -178,7 +187,7 @@ function install_lib() {
} }
function install_header() { function install_header() {
${csudo}rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/tdef.h ${inc_link_dir}/taoserror.h ${inc_link_dir}/taosudf.h || : ${csudo}rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosws.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/tdef.h ${inc_link_dir}/taoserror.h ${inc_link_dir}/taosudf.h || :
${csudo}cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo}chmod 644 ${install_main_dir}/include/* ${csudo}cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo}chmod 644 ${install_main_dir}/include/*
${csudo}ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h ${csudo}ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo}ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h ${csudo}ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h

View File

@ -66,6 +66,9 @@ copy %source_dir%\\include\\libs\\function\\taosudf.h %target_dir%\\include > nu
copy %binary_dir%\\build\\lib\\taos.lib %target_dir%\\driver > nul copy %binary_dir%\\build\\lib\\taos.lib %target_dir%\\driver > nul
copy %binary_dir%\\build\\lib\\taos_static.lib %target_dir%\\driver > nul copy %binary_dir%\\build\\lib\\taos_static.lib %target_dir%\\driver > nul
copy %binary_dir%\\build\\lib\\taos.dll %target_dir%\\driver > nul copy %binary_dir%\\build\\lib\\taos.dll %target_dir%\\driver > nul
copy %binary_dir%\\build\\lib\\taosnative.lib %target_dir%\\driver > nul
copy %binary_dir%\\build\\lib\\taosnative_static.lib %target_dir%\\driver > nul
copy %binary_dir%\\build\\lib\\taosnative.dll %target_dir%\\driver > nul
copy %binary_dir%\\build\\bin\\taos.exe %target_dir% > nul copy %binary_dir%\\build\\bin\\taos.exe %target_dir% > nul
if exist %binary_dir%\\build\\bin\\taosBenchmark.exe ( if exist %binary_dir%\\build\\bin\\taosBenchmark.exe (
copy %binary_dir%\\build\\bin\\taosBenchmark.exe %target_dir% > nul copy %binary_dir%\\build\\bin\\taosBenchmark.exe %target_dir% > nul
@ -149,12 +152,14 @@ call :check_svc taoskeeper
if exist c:\\windows\\sysnative ( if exist c:\\windows\\sysnative (
echo x86 echo x86
copy /y C:\\TDengine\\driver\\taos.dll %windir%\\sysnative > nul copy /y C:\\TDengine\\driver\\taos.dll %windir%\\sysnative > nul
copy /y C:\\TDengine\\driver\\taosnative.dll %windir%\\sysnative > nul
if exist C:\\TDengine\\driver\\taosws.dll ( if exist C:\\TDengine\\driver\\taosws.dll (
copy /y C:\\TDengine\\driver\\taosws.dll %windir%\\sysnative > nul copy /y C:\\TDengine\\driver\\taosws.dll %windir%\\sysnative > nul
) )
) else ( ) else (
echo x64 echo x64
copy /y C:\\TDengine\\driver\\taos.dll C:\\Windows\\System32 > nul copy /y C:\\TDengine\\driver\\taos.dll C:\\Windows\\System32 > nul
copy /y C:\\TDengine\\driver\\taosnative.dll C:\\Windows\\System32 > nul
if exist C:\\TDengine\\driver\\taosws.dll ( if exist C:\\TDengine\\driver\\taosws.dll (
copy /y C:\\TDengine\\driver\\taosws.dll C:\\Windows\\System32 > nul copy /y C:\\TDengine\\driver\\taosws.dll C:\\Windows\\System32 > nul
) )

View File

@ -313,9 +313,11 @@ function install_avro() {
function install_lib() { function install_lib() {
# Remove links # Remove links
${csudo}rm -f ${lib_link_dir}/libtaos.* || : ${csudo}rm -f ${lib_link_dir}/libtaos.* || :
${csudo}rm -f ${lib_link_dir}/libtaosnative.* || :
[ -f ${lib_link_dir}/libtaosws.so ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.so || : [ -f ${lib_link_dir}/libtaosws.so ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.so || :
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
${csudo}rm -f ${lib64_link_dir}/libtaos.* || : ${csudo}rm -f ${lib64_link_dir}/libtaos.* || :
${csudo}rm -f ${lib64_link_dir}/libtaosnative.* || :
[ -f ${lib64_link_dir}/libtaosws.so ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.so || : [ -f ${lib64_link_dir}/libtaosws.so ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.so || :
fi fi
@ -324,6 +326,10 @@ function install_lib() {
${install_main_dir}/driver && ${install_main_dir}/driver &&
${csudo}chmod 777 ${install_main_dir}/driver/libtaos.so.${verNumber} ${csudo}chmod 777 ${install_main_dir}/driver/libtaos.so.${verNumber}
${csudo}cp ${binary_dir}/build/lib/libtaosnative.so.${verNumber} \
${install_main_dir}/driver &&
${csudo}chmod 777 ${install_main_dir}/driver/libtaosnative.so.${verNumber}
${csudo}ln -sf ${install_main_dir}/driver/libtaos.* ${lib_link_dir}/libtaos.so.1 > /dev/null 2>&1 ${csudo}ln -sf ${install_main_dir}/driver/libtaos.* ${lib_link_dir}/libtaos.so.1 > /dev/null 2>&1
${csudo}ln -sf ${lib_link_dir}/libtaos.so.1 ${lib_link_dir}/libtaos.so > /dev/null 2>&1 ${csudo}ln -sf ${lib_link_dir}/libtaos.so.1 ${lib_link_dir}/libtaos.so > /dev/null 2>&1
if [ -d "${lib64_link_dir}" ]; then if [ -d "${lib64_link_dir}" ]; then
@ -331,6 +337,13 @@ function install_lib() {
${csudo}ln -sf ${lib64_link_dir}/libtaos.so.1 ${lib64_link_dir}/libtaos.so > /dev/null 2>&1 ${csudo}ln -sf ${lib64_link_dir}/libtaos.so.1 ${lib64_link_dir}/libtaos.so > /dev/null 2>&1
fi fi
${csudo}ln -sf ${install_main_dir}/driver/libtaosnative.* ${lib_link_dir}/libtaosnative.so.1 > /dev/null 2>&1
${csudo}ln -sf ${lib_link_dir}/libtaosnative.so.1 ${lib_link_dir}/libtaosnative.so > /dev/null 2>&1
if [ -d "${lib64_link_dir}" ]; then
${csudo}ln -sf ${install_main_dir}/driver/libtaosnative.* ${lib64_link_dir}/libtaosnative.so.1 > /dev/null 2>&1
${csudo}ln -sf ${lib64_link_dir}/libtaosnative.so.1 ${lib64_link_dir}/libtaosnative.so > /dev/null 2>&1
fi
if [ -f ${binary_dir}/build/lib/libtaosws.so ]; then if [ -f ${binary_dir}/build/lib/libtaosws.so ]; then
${csudo}cp ${binary_dir}/build/lib/libtaosws.so \ ${csudo}cp ${binary_dir}/build/lib/libtaosws.so \
${install_main_dir}/driver && ${install_main_dir}/driver &&
@ -342,11 +355,19 @@ function install_lib() {
${csudo}cp -Rf ${binary_dir}/build/lib/libtaos.${verNumber}.dylib \ ${csudo}cp -Rf ${binary_dir}/build/lib/libtaos.${verNumber}.dylib \
${install_main_dir}/driver && ${csudo}chmod 777 ${install_main_dir}/driver/* ${install_main_dir}/driver && ${csudo}chmod 777 ${install_main_dir}/driver/*
${csudo}cp -Rf ${binary_dir}/build/lib/libtaosnative.${verNumber}.dylib \
${install_main_dir}/driver && ${csudo}chmod 777 ${install_main_dir}/driver/*
${csudo}ln -sf ${install_main_dir}/driver/libtaos.${verNumber}.dylib \ ${csudo}ln -sf ${install_main_dir}/driver/libtaos.${verNumber}.dylib \
${lib_link_dir}/libtaos.1.dylib > /dev/null 2>&1 || : ${lib_link_dir}/libtaos.1.dylib > /dev/null 2>&1 || :
${csudo}ln -sf ${install_main_dir}/driver/libtaosnative.${verNumber}.dylib \
${lib_link_dir}/libtaosnative.1.dylib > /dev/null 2>&1 || :
${csudo}ln -sf ${lib_link_dir}/libtaos.1.dylib ${lib_link_dir}/libtaos.dylib > /dev/null 2>&1 || : ${csudo}ln -sf ${lib_link_dir}/libtaos.1.dylib ${lib_link_dir}/libtaos.dylib > /dev/null 2>&1 || :
${csudo}ln -sf ${lib_link_dir}/libtaosnative.1.dylib ${lib_link_dir}/libtaosnative.dylib > /dev/null 2>&1 || :
if [ -f ${binary_dir}/build/lib/libtaosws.dylib ]; then if [ -f ${binary_dir}/build/lib/libtaosws.dylib ]; then
${csudo}cp ${binary_dir}/build/lib/libtaosws.dylib \ ${csudo}cp ${binary_dir}/build/lib/libtaosws.dylib \
${install_main_dir}/driver && ${install_main_dir}/driver &&

View File

@ -79,10 +79,12 @@ if [ "$osType" != "Darwin" ]; then
${script_dir}/get_client.sh" ${script_dir}/get_client.sh"
fi fi
lib_files="${build_dir}/lib/libtaos.so.${version}" lib_files="${build_dir}/lib/libtaos.so.${version}"
nativelib_files="${build_dir}/lib/libtaosnative.so.${version}"
wslib_files="${build_dir}/lib/libtaosws.so" wslib_files="${build_dir}/lib/libtaosws.so"
else else
bin_files="${build_dir}/bin/${clientName} ${script_dir}/remove_client.sh" bin_files="${build_dir}/bin/${clientName} ${script_dir}/remove_client.sh"
lib_files="${build_dir}/lib/libtaos.${version}.dylib" lib_files="${build_dir}/lib/libtaos.${version}.dylib"
nativelib_files="${build_dir}/lib/libtaosnative.${version}.dylib"
wslib_files="${build_dir}/lib/libtaosws.dylib" wslib_files="${build_dir}/lib/libtaosws.dylib"
fi fi
@ -224,6 +226,7 @@ fi
# Copy driver # Copy driver
mkdir -p ${install_dir}/driver mkdir -p ${install_dir}/driver
cp ${lib_files} ${install_dir}/driver cp ${lib_files} ${install_dir}/driver
cp ${nativelib_files} ${install_dir}/driver
# Copy connector # Copy connector
connector_dir="${code_dir}/connector" connector_dir="${code_dir}/connector"

View File

@ -108,9 +108,11 @@ fi
if [ "$osType" == "Darwin" ]; then if [ "$osType" == "Darwin" ]; then
lib_files="${build_dir}/lib/libtaos.${version}.dylib" lib_files="${build_dir}/lib/libtaos.${version}.dylib"
nativelib_files="${build_dir}/lib/libtaosnative.${version}.dylib"
wslib_files="${build_dir}/lib/libtaosws.dylib" wslib_files="${build_dir}/lib/libtaosws.dylib"
else else
lib_files="${build_dir}/lib/libtaos.so.${version}" lib_files="${build_dir}/lib/libtaos.so.${version}"
nativelib_files="${build_dir}/lib/libtaosnative.so.${version}"
wslib_files="${build_dir}/lib/libtaosws.so" wslib_files="${build_dir}/lib/libtaosws.so"
fi fi
header_files="${code_dir}/include/client/taos.h ${code_dir}/include/common/taosdef.h ${code_dir}/include/util/taoserror.h ${code_dir}/include/util/tdef.h ${code_dir}/include/libs/function/taosudf.h" header_files="${code_dir}/include/client/taos.h ${code_dir}/include/common/taosdef.h ${code_dir}/include/util/taoserror.h ${code_dir}/include/util/tdef.h ${code_dir}/include/libs/function/taosudf.h"
@ -332,7 +334,7 @@ if [[ $dbName == "taos" ]]; then
fi fi
# Copy driver # Copy driver
mkdir -p ${install_dir}/driver && cp ${lib_files} ${install_dir}/driver && echo "${versionComp}" >${install_dir}/driver/vercomp.txt mkdir -p ${install_dir}/driver && cp ${lib_files} ${install_dir}/driver && cp ${nativelib_files} ${install_dir}/driver && echo "${versionComp}" >${install_dir}/driver/vercomp.txt
[ -f ${wslib_files} ] && cp ${wslib_files} ${install_dir}/driver || : [ -f ${wslib_files} ] && cp ${wslib_files} ${install_dir}/driver || :
# Copy connector && taosx # Copy connector && taosx

View File

@ -205,18 +205,24 @@ function install_lib() {
log_print "start install lib from ${lib_dir} to ${lib_link_dir}" log_print "start install lib from ${lib_dir} to ${lib_link_dir}"
${csudo}rm -f ${lib_link_dir}/libtaos* || : ${csudo}rm -f ${lib_link_dir}/libtaos* || :
${csudo}rm -f ${lib64_link_dir}/libtaos* || : ${csudo}rm -f ${lib64_link_dir}/libtaos* || :
${csudo}rm -f ${lib_link_dir}/libtaosnative* || :
${csudo}rm -f ${lib64_link_dir}/libtaosnative* || :
[ -f ${lib_link_dir}/libtaosws.${lib_file_ext} ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.${lib_file_ext} || : [ -f ${lib_link_dir}/libtaosws.${lib_file_ext} ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.${lib_file_ext} || :
[ -f ${lib64_link_dir}/libtaosws.${lib_file_ext} ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.${lib_file_ext} || : [ -f ${lib64_link_dir}/libtaosws.${lib_file_ext} ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.${lib_file_ext} || :
${csudo}ln -s ${lib_dir}/libtaos.* ${lib_link_dir}/libtaos.${lib_file_ext_1} 2>>${install_log_path} || return 1 ${csudo}ln -s ${lib_dir}/libtaos.* ${lib_link_dir}/libtaos.${lib_file_ext_1} 2>>${install_log_path} || return 1
${csudo}ln -s ${lib_link_dir}/libtaos.${lib_file_ext_1} ${lib_link_dir}/libtaos.${lib_file_ext} 2>>${install_log_path} || return 1 ${csudo}ln -s ${lib_link_dir}/libtaos.${lib_file_ext_1} ${lib_link_dir}/libtaos.${lib_file_ext} 2>>${install_log_path} || return 1
${csudo}ln -s ${lib_dir}/libtaosnative.* ${lib_link_dir}/libtaosnative.${lib_file_ext_1} 2>>${install_log_path} || return 1
${csudo}ln -s ${lib_link_dir}/libtaosnative.${lib_file_ext_1} ${lib_link_dir}/libtaosnative.${lib_file_ext} 2>>${install_log_path} || return 1
[ -f ${lib_dir}/libtaosws.${lib_file_ext} ] && ${csudo}ln -sf ${lib_dir}/libtaosws.${lib_file_ext} ${lib_link_dir}/libtaosws.${lib_file_ext} ||: [ -f ${lib_dir}/libtaosws.${lib_file_ext} ] && ${csudo}ln -sf ${lib_dir}/libtaosws.${lib_file_ext} ${lib_link_dir}/libtaosws.${lib_file_ext} ||:
if [[ -d ${lib64_link_dir} && ! -e ${lib64_link_dir}/libtaos.${lib_file_ext} ]]; then if [[ -d ${lib64_link_dir} && ! -e ${lib64_link_dir}/libtaos.${lib_file_ext} ]]; then
${csudo}ln -s ${lib_dir}/libtaos.* ${lib64_link_dir}/libtaos.${lib_file_ext_1} 2>>${install_log_path} || return 1 ${csudo}ln -s ${lib_dir}/libtaos.* ${lib64_link_dir}/libtaos.${lib_file_ext_1} 2>>${install_log_path} || return 1
${csudo}ln -s ${lib64_link_dir}/libtaos.${lib_file_ext_1} ${lib64_link_dir}/libtaos.${lib_file_ext} 2>>${install_log_path} || return 1 ${csudo}ln -s ${lib64_link_dir}/libtaos.${lib_file_ext_1} ${lib64_link_dir}/libtaos.${lib_file_ext} 2>>${install_log_path} || return 1
${csudo}ln -s ${lib_dir}/libtaosnative.* ${lib64_link_dir}/libtaosnative.${lib_file_ext_1} 2>>${install_log_path} || return 1
${csudo}ln -s ${lib64_link_dir}/libtaosnative.${lib_file_ext_1} ${lib64_link_dir}/libtaosnative.${lib_file_ext} 2>>${install_log_path} || return 1
[ -f ${lib_dir}/libtaosws.${lib_file_ext} ] && ${csudo}ln -sf ${lib_dir}/libtaosws.${lib_file_ext} ${lib64_link_dir}/libtaosws.${lib_file_ext} 2>>${install_log_path} [ -f ${lib_dir}/libtaosws.${lib_file_ext} ] && ${csudo}ln -sf ${lib_dir}/libtaosws.${lib_file_ext} ${lib64_link_dir}/libtaosws.${lib_file_ext} 2>>${install_log_path}
fi fi

View File

@ -150,6 +150,7 @@ clean_service
# Remove all links # Remove all links
${csudo}rm -f ${bin_link_dir}/taos || : ${csudo}rm -f ${bin_link_dir}/taos || :
${csudo}rm -f ${bin_link_dir}/taosd || : ${csudo}rm -f ${bin_link_dir}/taosd || :
${csudo}rm -f ${bin_link_dir}/taosudf || :
${csudo}rm -f ${bin_link_dir}/taosadapter || : ${csudo}rm -f ${bin_link_dir}/taosadapter || :
${csudo}rm -f ${bin_link_dir}/taosBenchmark || : ${csudo}rm -f ${bin_link_dir}/taosBenchmark || :
${csudo}rm -f ${bin_link_dir}/taosdemo || : ${csudo}rm -f ${bin_link_dir}/taosdemo || :
@ -167,8 +168,10 @@ ${csudo}rm -f ${inc_link_dir}/tdef.h || :
${csudo}rm -f ${inc_link_dir}/taosudf.h || : ${csudo}rm -f ${inc_link_dir}/taosudf.h || :
${csudo}rm -f ${inc_link_dir}/taosws.h || : ${csudo}rm -f ${inc_link_dir}/taosws.h || :
${csudo}rm -f ${lib_link_dir}/libtaos.* || : ${csudo}rm -f ${lib_link_dir}/libtaos.* || :
${csudo}rm -f ${lib_link_dir}/libtaosnative.* || :
${csudo}rm -f ${lib_link_dir}/libtaosws.so || : ${csudo}rm -f ${lib_link_dir}/libtaosws.so || :
${csudo}rm -f ${lib64_link_dir}/libtaos.* || : ${csudo}rm -f ${lib64_link_dir}/libtaos.* || :
${csudo}rm -f ${lib64_link_dir}/libtaosnative.* || :
${csudo}rm -f ${lib64_link_dir}/libtaosws.so || : ${csudo}rm -f ${lib64_link_dir}/libtaosws.so || :
${csudo}rm -f ${log_link_dir} || : ${csudo}rm -f ${log_link_dir} || :

View File

@ -180,9 +180,11 @@ remove_bin() {
function clean_lib() { function clean_lib() {
# Remove link # Remove link
${csudo}rm -f ${lib_link_dir}/libtaos.* || : ${csudo}rm -f ${lib_link_dir}/libtaos.* || :
[ -f ${lib_link_dir}/libtaosnative.* ] && ${csudo}rm -f ${lib_link_dir}/libtaosnative.* || :
[ -f ${lib_link_dir}/libtaosws.* ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.* || : [ -f ${lib_link_dir}/libtaosws.* ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.* || :
${csudo}rm -f ${lib64_link_dir}/libtaos.* || : ${csudo}rm -f ${lib64_link_dir}/libtaos.* || :
[ -f ${lib64_link_dir}/libtaosnative.* ] && ${csudo}rm -f ${lib64_link_dir}/libtaosnative.* || :
[ -f ${lib64_link_dir}/libtaosws.* ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.* || : [ -f ${lib64_link_dir}/libtaosws.* ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.* || :
#${csudo}rm -rf ${v15_java_app_dir} || : #${csudo}rm -rf ${v15_java_app_dir} || :
} }

View File

@ -73,9 +73,11 @@ function clean_lib() {
# Remove link # Remove link
${csudo}rm -f ${lib_link_dir}/libtaos.* || : ${csudo}rm -f ${lib_link_dir}/libtaos.* || :
[ -f ${lib_link_dir}/libtaosws.* ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.* || : [ -f ${lib_link_dir}/libtaosws.* ] && ${csudo}rm -f ${lib_link_dir}/libtaosws.* || :
[ -f ${lib_link_dir}/libtaosnative.* ] && ${csudo}rm -f ${lib_link_dir}/libtaosnative.* || :
${csudo}rm -f ${lib64_link_dir}/libtaos.* || : ${csudo}rm -f ${lib64_link_dir}/libtaos.* || :
[ -f ${lib64_link_dir}/libtaosws.* ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.* || : [ -f ${lib64_link_dir}/libtaosws.* ] && ${csudo}rm -f ${lib64_link_dir}/libtaosws.* || :
[ -f ${lib64_link_dir}/libtaosnative.* ] && ${csudo}rm -f ${lib64_link_dir}/libtaosnative.* || :
#${csudo}rm -rf ${v15_java_app_dir} || : #${csudo}rm -rf ${v15_java_app_dir} || :
} }

View File

@ -5,19 +5,19 @@ if(TD_ENTERPRISE)
endif() endif()
if(TD_WINDOWS) if(TD_WINDOWS)
add_library(${TAOS_LIB} SHARED ${CLIENT_SRC} ${CMAKE_CURRENT_SOURCE_DIR}/src/taos.rc.in) add_library(${TAOS_NATIVE_LIB} SHARED ${CLIENT_SRC} ${CMAKE_CURRENT_SOURCE_DIR}/src/taosnative.rc.in)
else() else()
add_library(${TAOS_LIB} SHARED ${CLIENT_SRC}) add_library(${TAOS_NATIVE_LIB} SHARED ${CLIENT_SRC})
endif() endif()
if(${TD_DARWIN}) if(${TD_DARWIN})
target_compile_options(${TAOS_LIB} PRIVATE -Wno-error=deprecated-non-prototype) target_compile_options(${TAOS_NATIVE_LIB} PRIVATE -Wno-error=deprecated-non-prototype)
endif() endif()
INCLUDE_DIRECTORIES(jni)
target_include_directories( target_include_directories(
${TAOS_LIB} ${TAOS_NATIVE_LIB}
PUBLIC "${TD_SOURCE_DIR}/include/client" PUBLIC "${TD_SOURCE_DIR}/include/client"
PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
) )
@ -26,46 +26,38 @@ if(${TAOSD_INTEGRATED})
set(TAOSD_MODULE "taosd") set(TAOSD_MODULE "taosd")
endif() endif()
target_link_libraries( target_link_libraries(
${TAOS_LIB} ${TAOS_NATIVE_LIB}
INTERFACE api INTERFACE api
PRIVATE os util common transport monitor nodes parser command planner catalog scheduler function qcom geometry ${TAOSD_MODULE} decimal PRIVATE os util common transport monitor nodes parser command planner catalog scheduler function qcom geometry ${TAOSD_MODULE} decimal
) )
if(TD_WINDOWS)
INCLUDE_DIRECTORIES(jni/windows)
INCLUDE_DIRECTORIES(jni/windows/win32)
INCLUDE_DIRECTORIES(jni/windows/win32/bridge)
else()
INCLUDE_DIRECTORIES(jni/linux)
endif()
set_target_properties( set_target_properties(
${TAOS_LIB} ${TAOS_NATIVE_LIB}
PROPERTIES PROPERTIES
CLEAN_DIRECT_OUTPUT CLEAN_DIRECT_OUTPUT
1 1
) )
set_target_properties( set_target_properties(
${TAOS_LIB} ${TAOS_NATIVE_LIB}
PROPERTIES PROPERTIES
VERSION ${TD_VER_NUMBER} VERSION ${TD_VER_NUMBER}
SOVERSION 1 SOVERSION 1
) )
add_library(${TAOS_LIB_STATIC} STATIC ${CLIENT_SRC}) add_library(${TAOS_NATIVE_LIB_STATIC} STATIC ${CLIENT_SRC})
if(${TD_DARWIN}) if(${TD_DARWIN})
target_compile_options(${TAOS_LIB_STATIC} PRIVATE -Wno-error=deprecated-non-prototype) target_compile_options(${TAOS_NATIVE_LIB_STATIC} PRIVATE -Wno-error=deprecated-non-prototype)
endif() endif()
target_include_directories( target_include_directories(
${TAOS_LIB_STATIC} ${TAOS_NATIVE_LIB_STATIC}
PUBLIC "${TD_SOURCE_DIR}/include/client" PUBLIC "${TD_SOURCE_DIR}/include/client"
PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
) )
target_link_libraries( target_link_libraries(
${TAOS_LIB_STATIC} ${TAOS_NATIVE_LIB_STATIC}
INTERFACE api INTERFACE api
PRIVATE os util common transport monitor nodes parser command planner catalog scheduler function qcom geometry decimal PRIVATE os util common transport monitor nodes parser command planner catalog scheduler function qcom geometry decimal
) )
@ -73,3 +65,5 @@ target_link_libraries(
if(${BUILD_TEST}) if(${BUILD_TEST})
ADD_SUBDIRECTORY(test) ADD_SUBDIRECTORY(test)
endif(${BUILD_TEST}) endif(${BUILD_TEST})
ADD_SUBDIRECTORY(wrapper)

View File

@ -922,7 +922,7 @@ void tscStopCrashReport() {
} }
} }
void tscWriteCrashInfo(int signum, void *sigInfo, void *context) { void taos_write_crashinfo(int signum, void *sigInfo, void *context) {
writeCrashLogToFile(signum, sigInfo, CUS_PROMPT, lastClusterId, appInfo.startTime); writeCrashLogToFile(signum, sigInfo, CUS_PROMPT, lastClusterId, appInfo.startTime);
} }
#endif #endif
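
Note: a sketch of how an embedding application could forward a fatal signal into the renamed taos_write_crashinfo entry point; the choice of signals and the sigaction registration are assumptions of the example, not part of the patch.

#include <signal.h>
#include <string.h>

void taos_write_crashinfo(int signum, void *sigInfo, void *context);  /* exported by the client library */

static void crashHandler(int signum, siginfo_t *info, void *context) {
  taos_write_crashinfo(signum, info, context);  /* persist the crash log first */
  signal(signum, SIG_DFL);                      /* then fall back to the default action */
  raise(signum);
}

static void installCrashHandler(void) {
  struct sigaction act;
  memset(&act, 0, sizeof(act));
  act.sa_sigaction = crashHandler;
  act.sa_flags = SA_SIGINFO;
  sigaction(SIGSEGV, &act, NULL);
  sigaction(SIGBUS, &act, NULL);
  sigaction(SIGABRT, &act, NULL);
}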

View File

@ -15,10 +15,10 @@ BEGIN
BEGIN BEGIN
BLOCK "040904b0" BLOCK "040904b0"
BEGIN BEGIN
VALUE "FileDescription", "Native C Driver for TDengine" VALUE "FileDescription", "C Driver for TDengine"
VALUE "FileVersion", "${TD_VER_NUMBER}" VALUE "FileVersion", "${TD_VER_NUMBER}"
VALUE "InternalName", "taos.dll(${TD_VER_CPUTYPE})" VALUE "InternalName", "taos.dll(${TD_VER_CPUTYPE})"
VALUE "LegalCopyright", "Copyright (C) 2020 TAOS Data" VALUE "LegalCopyright", "Copyright (C) 2025 TAOS Data"
VALUE "OriginalFilename", "" VALUE "OriginalFilename", ""
VALUE "ProductName", "taos.dll(${TD_VER_CPUTYPE})" VALUE "ProductName", "taos.dll(${TD_VER_CPUTYPE})"
VALUE "ProductVersion", "${TD_VER_NUMBER}" VALUE "ProductVersion", "${TD_VER_NUMBER}"

View File

@ -0,0 +1,31 @@
1 VERSIONINFO
FILEVERSION ${TD_VER_NUMBER}
PRODUCTVERSION ${TD_VER_NUMBER}
FILEFLAGSMASK 0x17L
#ifdef _DEBUG
FILEFLAGS 0x1L
#else
FILEFLAGS 0x0L
#endif
FILEOS 0x4L
FILETYPE 0x0L
FILESUBTYPE 0x0L
BEGIN
BLOCK "StringFileInfo"
BEGIN
BLOCK "040904b0"
BEGIN
VALUE "FileDescription", "Internal C Driver for TDengine"
VALUE "FileVersion", "${TD_VER_NUMBER}"
VALUE "InternalName", "taosnative.dll(${TD_VER_CPUTYPE})"
VALUE "LegalCopyright", "Copyright (C) 2025 TAOS Data"
VALUE "OriginalFilename", ""
VALUE "ProductName", "taosnative.dll(${TD_VER_CPUTYPE})"
VALUE "ProductVersion", "${TD_VER_NUMBER}"
END
END
BLOCK "VarFileInfo"
BEGIN
VALUE "Translation", 0x409, 1200
END
END

View File

@ -8,49 +8,49 @@ AUX_SOURCE_DIRECTORY(${CMAKE_CURRENT_SOURCE_DIR} SOURCE_LIST)
ADD_EXECUTABLE(clientTest clientTests.cpp) ADD_EXECUTABLE(clientTest clientTests.cpp)
TARGET_LINK_LIBRARIES( TARGET_LINK_LIBRARIES(
clientTest clientTest
os util common transport parser catalog scheduler gtest ${TAOS_LIB_STATIC} qcom executor function os util common transport parser catalog scheduler gtest ${TAOS_NATIVE_LIB_STATIC} qcom executor function
) )
ADD_EXECUTABLE(connectOptionsTest connectOptionsTest.cpp) ADD_EXECUTABLE(connectOptionsTest connectOptionsTest.cpp)
TARGET_LINK_LIBRARIES( TARGET_LINK_LIBRARIES(
connectOptionsTest connectOptionsTest
os util common transport parser catalog scheduler gtest ${TAOS_LIB_STATIC} qcom executor function os util common transport parser catalog scheduler gtest ${TAOS_NATIVE_LIB_STATIC} qcom executor function
) )
ADD_EXECUTABLE(tmqTest tmqTest.cpp) ADD_EXECUTABLE(tmqTest tmqTest.cpp)
TARGET_LINK_LIBRARIES( TARGET_LINK_LIBRARIES(
tmqTest tmqTest
PUBLIC os util common transport parser catalog scheduler function gtest ${TAOS_LIB_STATIC} qcom PUBLIC os util common transport parser catalog scheduler function gtest ${TAOS_NATIVE_LIB_STATIC} qcom
) )
ADD_EXECUTABLE(smlTest smlTest.cpp) ADD_EXECUTABLE(smlTest smlTest.cpp)
TARGET_LINK_LIBRARIES( TARGET_LINK_LIBRARIES(
smlTest smlTest
PUBLIC os util common transport parser catalog scheduler function gtest ${TAOS_LIB_STATIC} qcom geometry PUBLIC os util common transport parser catalog scheduler function gtest ${TAOS_NATIVE_LIB_STATIC} qcom geometry
) )
#ADD_EXECUTABLE(clientMonitorTest clientMonitorTests.cpp) #ADD_EXECUTABLE(clientMonitorTest clientMonitorTests.cpp)
#TARGET_LINK_LIBRARIES( #TARGET_LINK_LIBRARIES(
# clientMonitorTest # clientMonitorTest
# PUBLIC os util common transport monitor parser catalog scheduler function gtest ${TAOS_LIB_STATIC} qcom executor # PUBLIC os util common transport monitor parser catalog scheduler function gtest ${TAOS_NATIVE_LIB_STATIC} qcom executor
#) #)
ADD_EXECUTABLE(userOperTest ../../../tests/script/api/passwdTest.c) ADD_EXECUTABLE(userOperTest ../../../tests/script/api/passwdTest.c)
TARGET_LINK_LIBRARIES( TARGET_LINK_LIBRARIES(
userOperTest userOperTest
PUBLIC ${TAOS_LIB} PUBLIC ${TAOS_NATIVE_LIB}
) )
ADD_EXECUTABLE(stmt2Test stmt2Test.cpp) ADD_EXECUTABLE(stmt2Test stmt2Test.cpp)
TARGET_LINK_LIBRARIES( TARGET_LINK_LIBRARIES(
stmt2Test stmt2Test
os util common transport parser catalog scheduler gtest ${TAOS_LIB_STATIC} qcom executor function os util common transport parser catalog scheduler gtest ${TAOS_NATIVE_LIB_STATIC} qcom executor function
) )
ADD_EXECUTABLE(stmtTest stmtTest.cpp) ADD_EXECUTABLE(stmtTest stmtTest.cpp)
TARGET_LINK_LIBRARIES( TARGET_LINK_LIBRARIES(
stmtTest stmtTest
os util common transport parser catalog scheduler gtest ${TAOS_LIB_STATIC} qcom executor function os util common transport parser catalog scheduler gtest ${TAOS_NATIVE_LIB_STATIC} qcom executor function
) )
TARGET_INCLUDE_DIRECTORIES( TARGET_INCLUDE_DIRECTORIES(

View File

@ -0,0 +1,67 @@
aux_source_directory(src WRAPPER_SRC)
if(TD_WINDOWS)
add_library(${TAOS_LIB} SHARED ${WRAPPER_SRC} ${CMAKE_CURRENT_SOURCE_DIR}/../src/taos.rc.in)
else()
add_library(${TAOS_LIB} SHARED ${WRAPPER_SRC})
endif()
if(${TD_DARWIN})
target_compile_options(${TAOS_LIB} PRIVATE -Wno-error=deprecated-non-prototype)
endif()
# jni include
INCLUDE_DIRECTORIES(jni)
if(TD_WINDOWS)
INCLUDE_DIRECTORIES(jni/windows)
INCLUDE_DIRECTORIES(jni/windows/win32)
INCLUDE_DIRECTORIES(jni/windows/win32/bridge)
else()
INCLUDE_DIRECTORIES(jni/linux)
endif()
target_include_directories(
${TAOS_LIB}
PUBLIC "${TD_SOURCE_DIR}/include/client"
PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
)
target_link_libraries(
${TAOS_LIB}
PUBLIC os util
)
set_target_properties(
${TAOS_LIB}
PROPERTIES
CLEAN_DIRECT_OUTPUT
1
)
set_target_properties(
${TAOS_LIB}
PROPERTIES
VERSION ${TD_VER_NUMBER}
SOVERSION 1
)
add_library(${TAOS_LIB_STATIC} STATIC ${WRAPPER_SRC})
if(${TD_DARWIN})
target_compile_options(${TAOS_LIB_STATIC} PRIVATE -Wno-error=deprecated-non-prototype)
endif()
target_include_directories(
${TAOS_LIB_STATIC}
PUBLIC "${TD_SOURCE_DIR}/include/client"
PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
)
target_link_libraries(
${TAOS_LIB_STATIC}
PUBLIC os util
)
# if(${BUILD_TEST})
# ADD_SUBDIRECTORY(test)
# endif(${BUILD_TEST})

View File

@ -0,0 +1,232 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef TDENGINE_WRAPPER_H
#define TDENGINE_WRAPPER_H
#include "os.h"
#include "taos.h"
#ifdef __cplusplus
extern "C" {
#endif
typedef enum {
DRIVER_NATIVE = 0,
DRIVER_WEBSOCKET = 1,
DRIVER_MAX = 2,
} EDriverType;
extern EDriverType tsDriverType;
extern void *tsDriver;
extern int32_t taosDriverInit(EDriverType driverType);
extern void taosDriverCleanup();
extern setConfRet (*fp_taos_set_config)(const char *config);
extern int (*fp_taos_init)(void);
extern void (*fp_taos_cleanup)(void);
extern int (*fp_taos_options)(TSDB_OPTION option, const void *arg, ...);
extern int (*fp_taos_options_connection)(TAOS *taos, TSDB_OPTION_CONNECTION option, const void *arg, ...);
extern TAOS *(*fp_taos_connect)(const char *ip, const char *user, const char *pass, const char *db, uint16_t port);
extern TAOS *(*fp_taos_connect_auth)(const char *ip, const char *user, const char *auth, const char *db, uint16_t port);
extern void (*fp_taos_close)(TAOS *taos);
extern const char *(*fp_taos_data_type)(int type);
extern TAOS_STMT *(*fp_taos_stmt_init)(TAOS *taos);
extern TAOS_STMT *(*fp_taos_stmt_init_with_reqid)(TAOS *taos, int64_t reqid);
extern TAOS_STMT *(*fp_taos_stmt_init_with_options)(TAOS *taos, TAOS_STMT_OPTIONS *options);
extern int (*fp_taos_stmt_prepare)(TAOS_STMT *stmt, const char *sql, unsigned long length);
extern int (*fp_taos_stmt_set_tbname_tags)(TAOS_STMT *stmt, const char *name, TAOS_MULTI_BIND *tags);
extern int (*fp_taos_stmt_set_tbname)(TAOS_STMT *stmt, const char *name);
extern int (*fp_taos_stmt_set_tags)(TAOS_STMT *stmt, TAOS_MULTI_BIND *tags);
extern int (*fp_taos_stmt_set_sub_tbname)(TAOS_STMT *stmt, const char *name);
extern int (*fp_taos_stmt_get_tag_fields)(TAOS_STMT *stmt, int *fieldNum, TAOS_FIELD_E **fields);
extern int (*fp_taos_stmt_get_col_fields)(TAOS_STMT *stmt, int *fieldNum, TAOS_FIELD_E **fields);
extern void (*fp_taos_stmt_reclaim_fields)(TAOS_STMT *stmt, TAOS_FIELD_E *fields);
extern int (*fp_taos_stmt_is_insert)(TAOS_STMT *stmt, int *insert);
extern int (*fp_taos_stmt_num_params)(TAOS_STMT *stmt, int *nums);
extern int (*fp_taos_stmt_get_param)(TAOS_STMT *stmt, int idx, int *type, int *bytes);
extern int (*fp_taos_stmt_bind_param)(TAOS_STMT *stmt, TAOS_MULTI_BIND *bind);
extern int (*fp_taos_stmt_bind_param_batch)(TAOS_STMT *stmt, TAOS_MULTI_BIND *bind);
extern int (*fp_taos_stmt_bind_single_param_batch)(TAOS_STMT *stmt, TAOS_MULTI_BIND *bind, int colIdx);
extern int (*fp_taos_stmt_add_batch)(TAOS_STMT *stmt);
extern int (*fp_taos_stmt_execute)(TAOS_STMT *stmt);
extern TAOS_RES *(*fp_taos_stmt_use_result)(TAOS_STMT *stmt);
extern int (*fp_taos_stmt_close)(TAOS_STMT *stmt);
extern char *(*fp_taos_stmt_errstr)(TAOS_STMT *stmt);
extern int (*fp_taos_stmt_affected_rows)(TAOS_STMT *stmt);
extern int (*fp_taos_stmt_affected_rows_once)(TAOS_STMT *stmt);
extern TAOS_STMT2 *(*fp_taos_stmt2_init)(TAOS *taos, TAOS_STMT2_OPTION *option);
extern int (*fp_taos_stmt2_prepare)(TAOS_STMT2 *stmt, const char *sql, unsigned long length);
extern int (*fp_taos_stmt2_bind_param)(TAOS_STMT2 *stmt, TAOS_STMT2_BINDV *bindv, int32_t col_idx);
extern int (*fp_taos_stmt2_bind_param_a)(TAOS_STMT2 *stmt, TAOS_STMT2_BINDV *bindv, int32_t col_idx,
__taos_async_fn_t fp, void *param);
extern int (*fp_taos_stmt2_exec)(TAOS_STMT2 *stmt, int *affected_rows);
extern int (*fp_taos_stmt2_close)(TAOS_STMT2 *stmt);
extern int (*fp_taos_stmt2_is_insert)(TAOS_STMT2 *stmt, int *insert);
extern int (*fp_taos_stmt2_get_fields)(TAOS_STMT2 *stmt, int *count, TAOS_FIELD_ALL **fields);
extern void (*fp_taos_stmt2_free_fields)(TAOS_STMT2 *stmt, TAOS_FIELD_ALL *fields);
extern TAOS_RES *(*fp_taos_stmt2_result)(TAOS_STMT2 *stmt);
extern char *(*fp_taos_stmt2_error)(TAOS_STMT2 *stmt);
extern TAOS_RES *(*fp_taos_query)(TAOS *taos, const char *sql);
extern TAOS_RES *(*fp_taos_query_with_reqid)(TAOS *taos, const char *sql, int64_t reqId);
extern TAOS_ROW (*fp_taos_fetch_row)(TAOS_RES *res);
extern int (*fp_taos_result_precision)(TAOS_RES *res); // get the time precision of result
extern void (*fp_taos_free_result)(TAOS_RES *res);
extern void (*fp_taos_kill_query)(TAOS *taos);
extern int (*fp_taos_field_count)(TAOS_RES *res);
extern int (*fp_taos_num_fields)(TAOS_RES *res);
extern int (*fp_taos_affected_rows)(TAOS_RES *res);
extern int64_t (*fp_taos_affected_rows64)(TAOS_RES *res);
extern TAOS_FIELD *(*fp_taos_fetch_fields)(TAOS_RES *res);
extern TAOS_FIELD_E *(*fp_taos_fetch_fields_e)(TAOS_RES *res);
extern int (*fp_taos_select_db)(TAOS *taos, const char *db);
extern int (*fp_taos_print_row)(char *str, TAOS_ROW row, TAOS_FIELD *fields, int num_fields);
extern int (*fp_taos_print_row_with_size)(char *str, uint32_t size, TAOS_ROW row, TAOS_FIELD *fields, int num_fields);
extern void (*fp_taos_stop_query)(TAOS_RES *res);
extern bool (*fp_taos_is_null)(TAOS_RES *res, int32_t row, int32_t col);
extern int (*fp_taos_is_null_by_column)(TAOS_RES *res, int columnIndex, bool result[], int *rows);
extern bool (*fp_taos_is_update_query)(TAOS_RES *res);
extern int (*fp_taos_fetch_block)(TAOS_RES *res, TAOS_ROW *rows);
extern int (*fp_taos_fetch_block_s)(TAOS_RES *res, int *numOfRows, TAOS_ROW *rows);
extern int (*fp_taos_fetch_raw_block)(TAOS_RES *res, int *numOfRows, void **pData);
extern int *(*fp_taos_get_column_data_offset)(TAOS_RES *res, int columnIndex);
extern int (*fp_taos_validate_sql)(TAOS *taos, const char *sql);
extern void (*fp_taos_reset_current_db)(TAOS *taos);
extern int *(*fp_taos_fetch_lengths)(TAOS_RES *res);
extern TAOS_ROW *(*fp_taos_result_block)(TAOS_RES *res);
extern const char *(*fp_taos_get_server_info)(TAOS *taos);
extern const char *(*fp_taos_get_client_info)();
extern int (*fp_taos_get_current_db)(TAOS *taos, char *database, int len, int *required);
extern const char *(*fp_taos_errstr)(TAOS_RES *res);
extern int (*fp_taos_errno)(TAOS_RES *res);
extern void (*fp_taos_query_a)(TAOS *taos, const char *sql, __taos_async_fn_t fp, void *param);
extern void (*fp_taos_query_a_with_reqid)(TAOS *taos, const char *sql, __taos_async_fn_t fp, void *param,
int64_t reqid);
extern void (*fp_taos_fetch_rows_a)(TAOS_RES *res, __taos_async_fn_t fp, void *param);
extern void (*fp_taos_fetch_raw_block_a)(TAOS_RES *res, __taos_async_fn_t fp, void *param);
extern const void *(*fp_taos_get_raw_block)(TAOS_RES *res);
extern int (*fp_taos_get_db_route_info)(TAOS *taos, const char *db, TAOS_DB_ROUTE_INFO *dbInfo);
extern int (*fp_taos_get_table_vgId)(TAOS *taos, const char *db, const char *table, int *vgId);
extern int (*fp_taos_get_tables_vgId)(TAOS *taos, const char *db, const char *table[], int tableNum, int *vgId);
extern int (*fp_taos_load_table_info)(TAOS *taos, const char *tableNameList);
extern void (*fp_taos_set_hb_quit)(int8_t quitByKill);
extern int (*fp_taos_set_notify_cb)(TAOS *taos, __taos_notify_fn_t fp, void *param, int type);
extern void (*fp_taos_fetch_whitelist_a)(TAOS *taos, __taos_async_whitelist_fn_t fp, void *param);
extern int (*fp_taos_set_conn_mode)(TAOS *taos, int mode, int value);
extern TAOS_RES *(*fp_taos_schemaless_insert)(TAOS *taos, char *lines[], int numLines, int protocol, int precision);
extern TAOS_RES *(*fp_taos_schemaless_insert_with_reqid)(TAOS *taos, char *lines[], int numLines, int protocol,
int precision, int64_t reqid);
extern TAOS_RES *(*fp_taos_schemaless_insert_raw)(TAOS *taos, char *lines, int len, int32_t *totalRows, int protocol,
int precision);
extern TAOS_RES *(*fp_taos_schemaless_insert_raw_with_reqid)(TAOS *taos, char *lines, int len, int32_t *totalRows,
int protocol, int precision, int64_t reqid);
extern TAOS_RES *(*fp_taos_schemaless_insert_ttl)(TAOS *taos, char *lines[], int numLines, int protocol, int precision,
int32_t ttl);
extern TAOS_RES *(*fp_taos_schemaless_insert_ttl_with_reqid)(TAOS *taos, char *lines[], int numLines, int protocol,
int precision, int32_t ttl, int64_t reqid);
extern TAOS_RES *(*fp_taos_schemaless_insert_raw_ttl)(TAOS *taos, char *lines, int len, int32_t *totalRows,
int protocol, int precision, int32_t ttl);
extern TAOS_RES *(*fp_taos_schemaless_insert_raw_ttl_with_reqid)(TAOS *taos, char *lines, int len, int32_t *totalRows,
int protocol, int precision, int32_t ttl,
int64_t reqid);
extern TAOS_RES *(*fp_taos_schemaless_insert_raw_ttl_with_reqid_tbname_key)(TAOS *taos, char *lines, int len,
int32_t *totalRows, int protocol,
int precision, int32_t ttl, int64_t reqid,
char *tbnameKey);
extern TAOS_RES *(*fp_taos_schemaless_insert_ttl_with_reqid_tbname_key)(TAOS *taos, char *lines[], int numLines,
int protocol, int precision, int32_t ttl,
int64_t reqid, char *tbnameKey);
extern tmq_conf_t *(*fp_tmq_conf_new)();
extern tmq_conf_res_t (*fp_tmq_conf_set)(tmq_conf_t *conf, const char *key, const char *value);
extern void (*fp_tmq_conf_destroy)(tmq_conf_t *conf);
extern void (*fp_tmq_conf_set_auto_commit_cb)(tmq_conf_t *conf, tmq_commit_cb *cb, void *param);
extern tmq_list_t *(*fp_tmq_list_new)();
extern int32_t (*fp_tmq_list_append)(tmq_list_t *, const char *);
extern void (*fp_tmq_list_destroy)(tmq_list_t *);
extern int32_t (*fp_tmq_list_get_size)(const tmq_list_t *);
extern char **(*fp_tmq_list_to_c_array)(const tmq_list_t *);
extern tmq_t *(*fp_tmq_consumer_new)(tmq_conf_t *conf, char *errstr, int32_t errstrLen);
extern int32_t (*fp_tmq_subscribe)(tmq_t *tmq, const tmq_list_t *topic_list);
extern int32_t (*fp_tmq_unsubscribe)(tmq_t *tmq);
extern int32_t (*fp_tmq_subscription)(tmq_t *tmq, tmq_list_t **topics);
extern TAOS_RES *(*fp_tmq_consumer_poll)(tmq_t *tmq, int64_t timeout);
extern int32_t (*fp_tmq_consumer_close)(tmq_t *tmq);
extern int32_t (*fp_tmq_commit_sync)(tmq_t *tmq, const TAOS_RES *msg);
extern void (*fp_tmq_commit_async)(tmq_t *tmq, const TAOS_RES *msg, tmq_commit_cb *cb, void *param);
extern int32_t (*fp_tmq_commit_offset_sync)(tmq_t *tmq, const char *pTopicName, int32_t vgId, int64_t offset);
extern void (*fp_tmq_commit_offset_async)(tmq_t *tmq, const char *pTopicName, int32_t vgId, int64_t offset,
tmq_commit_cb *cb, void *param);
extern int32_t (*fp_tmq_get_topic_assignment)(tmq_t *tmq, const char *pTopicName, tmq_topic_assignment **assignment,
int32_t *numOfAssignment);
extern void (*fp_tmq_free_assignment)(tmq_topic_assignment *pAssignment);
extern int32_t (*fp_tmq_offset_seek)(tmq_t *tmq, const char *pTopicName, int32_t vgId, int64_t offset);
extern int64_t (*fp_tmq_position)(tmq_t *tmq, const char *pTopicName, int32_t vgId);
extern int64_t (*fp_tmq_committed)(tmq_t *tmq, const char *pTopicName, int32_t vgId);
extern TAOS *(*fp_tmq_get_connect)(tmq_t *tmq);
extern const char *(*fp_tmq_get_table_name)(TAOS_RES *res);
extern tmq_res_t (*fp_tmq_get_res_type)(TAOS_RES *res);
extern const char *(*fp_tmq_get_topic_name)(TAOS_RES *res);
extern const char *(*fp_tmq_get_db_name)(TAOS_RES *res);
extern int32_t (*fp_tmq_get_vgroup_id)(TAOS_RES *res);
extern int64_t (*fp_tmq_get_vgroup_offset)(TAOS_RES *res);
extern const char *(*fp_tmq_err2str)(int32_t code);
extern int32_t (*fp_tmq_get_raw)(TAOS_RES *res, tmq_raw_data *raw);
extern int32_t (*fp_tmq_write_raw)(TAOS *taos, tmq_raw_data raw);
extern int (*fp_taos_write_raw_block)(TAOS *taos, int numOfRows, char *pData, const char *tbname);
extern int (*fp_taos_write_raw_block_with_reqid)(TAOS *taos, int numOfRows, char *pData, const char *tbname,
int64_t reqid);
extern int (*fp_taos_write_raw_block_with_fields)(TAOS *taos, int rows, char *pData, const char *tbname,
TAOS_FIELD *fields, int numFields);
extern int (*fp_taos_write_raw_block_with_fields_with_reqid)(TAOS *taos, int rows, char *pData, const char *tbname,
TAOS_FIELD *fields, int numFields, int64_t reqid);
extern void (*fp_tmq_free_raw)(tmq_raw_data raw);
extern char *(*fp_tmq_get_json_meta)(TAOS_RES *res);
extern void (*fp_tmq_free_json_meta)(char *jsonMeta);
extern TSDB_SERVER_STATUS (*fp_taos_check_server_status)(const char *fqdn, int port, char *details, int maxlen);
extern void (*fp_taos_write_crashinfo)(int signum, void *sigInfo, void *context);
extern char *(*fp_getBuildInfo)();
#ifdef __cplusplus
}
#endif
#endif // TDENGINE_WRAPPER_H
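
Note: to show how this wrapper header is meant to be used, a minimal sketch of a forwarding stub follows. The public libtaos now keeps thin entry points that dispatch through the fp_* pointers declared above, which taosDriverInit resolves from the native (libtaosnative) or websocket driver. The stub body below is an assumption inferred from these declarations, not the actual wrapper source.

#include "taos.h"

/* resolved by taosDriverInit() from the selected driver library (see declarations above) */
extern TAOS_RES *(*fp_taos_query)(TAOS *taos, const char *sql);

TAOS_RES *taos_query(TAOS *taos, const char *sql) {
  if (fp_taos_query == NULL) {
    return NULL;  /* driver not loaded; real code would report TSDB_CODE_DLL_NOT_LOAD */
  }
  return fp_taos_query(taos, sql);  /* forward to the loaded native or websocket implementation */
}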

View File

@ -14,107 +14,6 @@
extern "C" { extern "C" {
#endif #endif
/*
* AWT native interface (new in JDK 1.3)
*
* The AWT native interface allows a native C or C++ application a means
* by which to access native structures in AWT. This is to facilitate moving
* legacy C and C++ applications to Java and to target the needs of the
* community who, at present, wish to do their own native rendering to canvases
* for performance reasons. Standard extensions such as Java3D also require a
* means to access the underlying native data structures of AWT.
*
* There may be future extensions to this API depending on demand.
*
* A VM does not have to implement this API in order to pass the JCK.
* It is recommended, however, that this API is implemented on VMs that support
* standard extensions, such as Java3D.
*
* Since this is a native API, any program which uses it cannot be considered
* 100% pure java.
*/
/*
* AWT Native Drawing Surface (JAWT_DrawingSurface).
*
* For each platform, there is a native drawing surface structure. This
* platform-specific structure can be found in jawt_md.h. It is recommended
* that additional platforms follow the same model. It is also recommended
* that VMs on Win32 and Solaris support the existing structures in jawt_md.h.
*
*******************
* EXAMPLE OF USAGE:
*******************
*
* In Win32, a programmer wishes to access the HWND of a canvas to perform
* native rendering into it. The programmer has declared the paint() method
* for their canvas subclass to be native:
*
*
* MyCanvas.java:
*
* import java.awt.*;
*
* public class MyCanvas extends Canvas {
*
* static {
* System.loadLibrary("mylib");
* }
*
* public native void paint(Graphics g);
* }
*
*
* myfile.c:
*
* #include "jawt_md.h"
* #include <assert.h>
*
* JNIEXPORT void JNICALL
* Java_MyCanvas_paint(JNIEnv* env, jobject canvas, jobject graphics)
* {
* JAWT awt;
* JAWT_DrawingSurface* ds;
* JAWT_DrawingSurfaceInfo* dsi;
* JAWT_Win32DrawingSurfaceInfo* dsi_win;
* jboolean result;
* jint lock;
*
* // Get the AWT
* awt.version = JAWT_VERSION_1_3;
* result = JAWT_GetAWT(env, &awt);
* assert(result != JNI_FALSE);
*
* // Get the drawing surface
* ds = awt.GetDrawingSurface(env, canvas);
* assert(ds != NULL);
*
* // Lock the drawing surface
* lock = ds->Lock(ds);
* assert((lock & JAWT_LOCK_ERROR) == 0);
*
* // Get the drawing surface info
* dsi = ds->GetDrawingSurfaceInfo(ds);
*
* // Get the platform-specific drawing info
* dsi_win = (JAWT_Win32DrawingSurfaceInfo*)dsi->platformInfo;
*
* //////////////////////////////
* // !!! DO PAINTING HERE !!! //
* //////////////////////////////
*
* // Free the drawing surface info
* ds->FreeDrawingSurfaceInfo(dsi);
*
* // Unlock the drawing surface
* ds->Unlock(ds);
*
* // Free the drawing surface
* awt.FreeDrawingSurface(ds);
* }
*
*/
/* /*
* JAWT_Rectangle * JAWT_Rectangle
* Structure for a native rectangle. * Structure for a native rectangle.

View File

@ -52,87 +52,6 @@ extern "C" {
* 100% pure java. * 100% pure java.
*/ */
/*
* AWT Native Drawing Surface (JAWT_DrawingSurface).
*
* For each platform, there is a native drawing surface structure. This
* platform-specific structure can be found in jawt_md.h. It is recommended
* that additional platforms follow the same model. It is also recommended
* that VMs on Win32 and Solaris support the existing structures in jawt_md.h.
*
*******************
* EXAMPLE OF USAGE:
*******************
*
* In Win32, a programmer wishes to access the HWND of a canvas to perform
* native rendering into it. The programmer has declared the paint() method
* for their canvas subclass to be native:
*
*
* MyCanvas.java:
*
* import java.awt.*;
*
* public class MyCanvas extends Canvas {
*
* static {
* System.loadLibrary("mylib");
* }
*
* public native void paint(Graphics g);
* }
*
*
* myfile.c:
*
* #include "jawt_md.h"
* #include <assert.h>
*
* JNIEXPORT void JNICALL
* Java_MyCanvas_paint(JNIEnv* env, jobject canvas, jobject graphics)
* {
* JAWT awt;
* JAWT_DrawingSurface* ds;
* JAWT_DrawingSurfaceInfo* dsi;
* JAWT_Win32DrawingSurfaceInfo* dsi_win;
* jboolean result;
* jint lock;
*
* // Get the AWT
* awt.version = JAWT_VERSION_1_3;
* result = JAWT_GetAWT(env, &awt);
* assert(result != JNI_FALSE);
*
* // Get the drawing surface
* ds = awt.GetDrawingSurface(env, canvas);
* assert(ds != NULL);
*
* // Lock the drawing surface
* lock = ds->Lock(ds);
* assert((lock & JAWT_LOCK_ERROR) == 0);
*
* // Get the drawing surface info
* dsi = ds->GetDrawingSurfaceInfo(ds);
*
* // Get the platform-specific drawing info
* dsi_win = (JAWT_Win32DrawingSurfaceInfo*)dsi->platformInfo;
*
* //////////////////////////////
* // !!! DO PAINTING HERE !!! //
* //////////////////////////////
*
* // Free the drawing surface info
* ds->FreeDrawingSurfaceInfo(dsi);
*
* // Unlock the drawing surface
* ds->Unlock(ds);
*
* // Free the drawing surface
* awt.FreeDrawingSurface(ds);
* }
*
*/
/* /*
* JAWT_Rectangle * JAWT_Rectangle
* Structure for a native rectangle. * Structure for a native rectangle.

Some files were not shown because too many files have changed in this diff.