Compare commits

..

2 Commits

Author SHA1 Message Date
liukj 5eec53b62b docs: update v1.27.0 release note 2025-01-25 15:49:22 +08:00
liukj 9a585bf1e7 docs: rebuild v1.27.0 docs 2025-01-25 15:38:15 +08:00
125 changed files with 754 additions and 3546 deletions

View File

@ -16,16 +16,14 @@ on:
branches: [ "main" ]
permissions:
# Required to upload SARIF file to CodeQL. See: https://github.com/github/codeql-action/issues/2117
actions: read
# Require writing security events to upload SARIF file to security tab
security-events: write
# Only need to read contents
# Read commit contents
contents: read
jobs:
scan-pr:
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v1.9.1"
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@1f1242919d8a60496dd1874b24b62b2370ed4c78" # v1.7.1
with:
# Example of specifying custom arguments
scan-args: |-

View File

@ -242,7 +242,7 @@ POST $[[SETUP_INDEX_PREFIX]]alert-rule/$[[SETUP_DOC_TYPE]]/builtin-cal8n7p7h710d
"bool": {
"must": [
{
"term": {
"match": {
"payload.elasticsearch.cluster_health.status": "red"
}
},
@ -400,7 +400,7 @@ POST $[[SETUP_INDEX_PREFIX]]alert-rule/$[[SETUP_DOC_TYPE]]/builtin-calavvp7h710d
".infini_metrics"
],
"filter": {},
"raw_filter": {"bool":{"must":[{"term":{"payload.elasticsearch.index_health.status":"red"}},{"term":{"metadata.name":{"value":"index_health"}}}]}},
"raw_filter": {"bool":{"must":[{"match":{"payload.elasticsearch.index_health.status":"red"}},{"term":{"metadata.name":{"value":"index_health"}}}]}},
"time_field": "timestamp",
"context": {
"fields": null
@ -1760,158 +1760,4 @@ POST $[[SETUP_INDEX_PREFIX]]alert-rule/$[[SETUP_DOC_TYPE]]/builtin-cal8n7p7h710d
"name": "$[[SETUP_USERNAME]]",
"id": "$[[SETUP_USER_ID]]"
}
}
POST $[[SETUP_INDEX_PREFIX]]alert-rule/$[[SETUP_DOC_TYPE]]/builtin-cujivv5ath26drn6bcl0
{
"id": "builtin-cujivv5ath26drn6bcl0",
"created": "2025-02-08T18:20:44.273334+08:00",
"updated": "2025-02-12T16:31:05.672771+08:00",
"name": "Cluster Metrics Collection Anomaly",
"enabled": true,
"resource": {
"resource_id": "$[[SETUP_RESOURCE_ID]]",
"resource_name": "$[[SETUP_RESOURCE_NAME]]",
"type": "elasticsearch",
"objects": [
".infini_metrics*"
],
"filter": {},
"raw_filter": {
"bool": {
"must": [
{
"terms": {
"metadata.name": [
"cluster_health",
"cluster_stats",
"index_stats",
"node_stats",
"shard_stats"
]
}
}
]
}
},
"time_field": "timestamp",
"context": {
"fields": null
}
},
"metrics": {
"bucket_size": "1m",
"groups": [
{
"field": "metadata.labels.cluster_id",
"limit": 5
},
{
"field": "metadata.name",
"limit": 5
}
],
"formula": "a",
"items": [
{
"name": "a",
"field": "agent.id",
"statistic": "count"
}
],
"bucket_label": {
"enabled": false
},
"expression": "count(agent.id)"
},
"bucket_conditions": {
"operator": "any",
"items": [
{
"minimum_period_match": 1,
"operator": "lt",
"values": [
"0"
],
"priority": "critical",
"type": "content",
"bucket_count": 10
}
]
},
"notification_config": {
"enabled": true,
"title": "🔥 [{{.rule_name}}] Alerting",
"message": "{{range .results}}\n{{$cn := lookup \"category=metadata, object=cluster, property=name, default=N/A\" (index .group_values 0) }}\n{{$cu := printf \"%s/#/cluster/monitor/elasticsearch/%s\" $.env.INFINI_CONSOLE_ENDPOINT (index .group_values 0)}}\nCluster [[{{$cn}}]({{$cu}}?_g=%7B%22timeRange%22:%7B%22min%22:%22{{$.min}}%22%2C%22max%22:%22{{$.max}}%22%7D%7D)] ({{index .group_values 1}}) metrics has dropped at {{.issue_timestamp | datetime}};\n{{end}}",
"normal": [
{
"id": "cgnb2nt3q95nmusjl65g",
"enabled": true
},
{
"id": "cgiospt3q95q49k3u00g",
"enabled": true
},
{
"id": "cj865st3q95rega919ig",
"enabled": true
},
{
"id": "cgnb2r53q95nmusjl6vg",
"enabled": true
},
{
"id": "ch1os6t3q95lk6lepkq0",
"enabled": true
},
{
"id": "cgnb2kt3q95nmusjl64g",
"enabled": true
}
],
"throttle_period": "6h",
"accept_time_range": {
"start": "00:00",
"end": "23:59"
}
},
"category": "Platform",
"recovery_notification_config": {
"enabled": true,
"title": "🌈 [{{.rule_name}}] Resolved",
"message": "EventID: {{.event_id}} \nTarget: {{.resource_name}}-{{.objects}} \nTriggerAt: {{.trigger_at | datetime}} \nResolveAt: {{.timestamp | datetime}} \nDuration: {{.duration}} ",
"normal": [
{
"id": "cj8bq8d3q95ogankugqg",
"enabled": true
},
{
"id": "cj8ctat3q95l9ebbntlg",
"enabled": true
},
{
"id": "cj8atf53q95lhahebg8g",
"enabled": true
},
{
"id": "cj8e9s53q95gsdbb054g",
"enabled": true
},
{
"id": "cj8e9gt3q95gsdbb0170",
"enabled": true
},
{
"id": "cj86l0l3q95rrpfea6ug",
"enabled": true
}
],
"event_enabled": true
},
"schedule": {
"interval": "1m"
},
"creator": {
"name": "$[[SETUP_USERNAME]]",
"id": "$[[SETUP_USER_ID]]"
}
}

View File

@ -1,22 +1,22 @@
path.data: data
path.logs: log
allow_multi_instance: false
configs.auto_reload: true
allow_multi_instance: true
configs.auto_reload: false
entry:
- name: agent_es_entry
- name: my_es_entry
enabled: true
router: agent_metrics_router
router: my_router
max_concurrency: 200000
network:
binding: 0.0.0.0:8765
tls: #for mTLS connection with config servers
enabled: true
binding: 0.0.0.0:8081
# tls: #for mTLS connection with config servers
# enabled: true
# ca_file: /xxx/ca.crt
# cert_file: /xxx/server.crt
# key_file: /xxx/server.key
skip_insecure_verify: false
# skip_insecure_verify: false
flow:
- name: deny_flow
@ -28,7 +28,7 @@ flow:
filter:
- basic_auth:
valid_users:
$[[SETUP_AGENT_USERNAME]]: $[[SETUP_AGENT_PASSWORD]]
ingest: n
- rewrite_to_bulk:
type_removed: false
- bulk_request_mutate:
@ -50,7 +50,7 @@ flow:
fix_null_id: true
router:
- name: agent_metrics_router
- name: my_router
default_flow: deny_flow
rules:
- method:
@ -65,8 +65,8 @@ elasticsearch:
- name: prod
enabled: true
basic_auth:
username: $[[SETUP_AGENT_USERNAME]]
password: $[[SETUP_AGENT_PASSWORD]]
username: ingest
password: password
endpoints: $[[SETUP_ENDPOINTS]]
pipeline:

View File

@ -52,8 +52,8 @@ pipeline:
Content-Type: application/json
body: $[[message]]
basic_auth:
username: $[[SETUP_AGENT_USERNAME]]
password: $[[SETUP_AGENT_PASSWORD]]
username: $[[SETUP_ES_USERNAME]]
password: $[[SETUP_ES_PASSWORD]]
# tls: #for mTLS connection with config servers
# enabled: true
# ca_file: /xxx/ca.crt

View File

@ -6,8 +6,6 @@ elasticsearch:
name: $[[TASK_ID]]
cluster_uuid: $[[CLUSTER_UUID]]
enabled: true
distribution: $[[CLUSTER_DISTRIBUTION]]
version: $[[CLUSTER_VERSION]]
endpoints: $[[CLUSTER_ENDPOINT]]
discovery:
enabled: false
@ -34,7 +32,6 @@ pipeline:
labels:
cluster_id: $[[CLUSTER_ID]]
cluster_uuid: $[[CLUSTER_UUID]]
cluster_name: $[[CLUSTER_NAME]]
when:
cluster_available: ["$[[TASK_ID]]"]
@ -50,7 +47,6 @@ pipeline:
labels:
cluster_id: $[[CLUSTER_ID]]
cluster_uuid: $[[CLUSTER_UUID]]
cluster_name: $[[CLUSTER_NAME]]
logs_path: $[[NODE_LOGS_PATH]]
queue_name: logs
when:

View File

@ -28,19 +28,6 @@ PUT _template/$[[SETUP_TEMPLATE_NAME]]
}
},
"mappings": {
"properties": {
"metadata": {
"properties": {
"labels": {
"properties": {
"cluster_id": {
"type": "keyword"
}
}
}
}
}
},
"dynamic_templates": [
{
"strings": {
@ -381,12 +368,6 @@ PUT _template/$[[SETUP_INDEX_PREFIX]]alert-history-rollover
}
},
"mappings" : {
"properties" : {
"condition_result" : {
"type" : "object",
"enabled" : false
}
},
"dynamic_templates" : [
{
"strings" : {

View File

@ -334,8 +334,7 @@ PUT /_cluster/settings
"rollup": {
"search": {
"enabled": "true"
},
"hours_before": "24"
}
}
}
}
@ -358,39 +357,5 @@ PUT /.easysearch-ilm-config/_settings
}
}
# ilm settings for rollup indices
DELETE _ilm/policy/ilm_$[[SETUP_INDEX_PREFIX]]rollup-30days-retention
PUT _ilm/policy/ilm_$[[SETUP_INDEX_PREFIX]]rollup-30days-retention
{
"policy": {
"phases": {
"hot": {
"min_age": "0ms"
},
"delete": {
"min_age": "30d",
"actions": {
"delete": {
"timestamp_field": "timestamp.date_histogram",
"min_data_age": "30d"
}
}
}
}
}
}
# add ilm policy to rollup indices
PUT _template/rollup_policy_template
{
"order": 1,
"index_patterns": ["rollup*"],
"settings": {
"index.lifecycle.name": "ilm_$[[SETUP_INDEX_PREFIX]]rollup-30days-retention"
}
}
# start all rollup jobs
POST /_rollup/jobs/rollup*/_start

View File

@ -27,19 +27,6 @@ PUT _template/$[[SETUP_TEMPLATE_NAME]]
}
},
"mappings": {
"properties": {
"metadata": {
"properties": {
"labels": {
"properties": {
"cluster_id": {
"type": "keyword"
}
}
}
}
}
},
"dynamic_templates": [
{
"strings": {

View File

@ -27,19 +27,6 @@ PUT _template/$[[SETUP_TEMPLATE_NAME]]
}
},
"mappings": {
"properties": {
"metadata": {
"properties": {
"labels": {
"properties": {
"cluster_id": {
"type": "keyword"
}
}
}
}
}
},
"dynamic_templates": [
{
"strings": {

View File

@ -104,7 +104,6 @@ security:
# group_attribute: "ou"
# bypass_api_key: true
# cache_ttl: "10s"
# default_roles: ["ReadonlyUI","DATA"] #default for all ldap users if no specify roles was defined
# role_mapping:
# group:
# superheros: [ "Administrator" ]
@ -119,7 +118,6 @@ security:
# base_dn: "dc=example,dc=com"
# user_filter: "(uid=%s)"
# cache_ttl: "10s"
# default_roles: ["ReadonlyUI","DATA"] #default for all ldap users if no specify roles was defined
# role_mapping:
# uid:
# tesla: [ "readonly","data" ]

View File

@ -31,21 +31,16 @@ Click the `New` button in the alerting rule list to enter the new alerting rule
### Configure alerting metrics and trigger conditions
{{% load-img "/img/screenshot/2025/alerting/rule_condition.png" "alerting rule new" %}}
{{% load-img "/img/screenshot/20220715-alerting-rule-new-metric.jpg" "alerting rule new" %}}
- Input the rule name
- Add the grouped fields and group size as needed, you can add more than one for terms aggregation
- Select the metrics aggregation field and statistics type, you can configure more than one, when configuring more than one, you must configure a formula to calculate the final metrics
- Configure alerting trigger conditions
- Select **Metrics value**
- Select **Bucket diff**
- Select based on **Doc diff** or **Content diff**
>**Doc diff**: The difference in the number of matching documents between two adjacent time buckets
>**Content diff**: Whether there's a change in a group between two adjacent time buckets. A difference value of 1 indicates an increase, -1 indicates a decrease, and 0 indicates no change
- Select execution check cycle
- Input the title of the alerting event (template, referenced by the title in the template variable, click here to learn about [template syntax](./variables) )
- Input alerting event message (template, referenced by message in template variable, click here for [template syntax](./variables) )
>**Bucket Diff** is a feature introduced in INFINI Console version 1.28.2. It can be used to detect differences in data across different time periods, such as checking if there's an abnormal change in data volume during a specific time window.
### Configure message notification
{{% load-img "/img/screenshot/20220715-alerting-rule-new-notification.jpg" "alerting rule new" %}}

View File

@ -7,119 +7,18 @@ title: "Release Notes"
Information about release notes of INFINI Console is provided here.
## Latest (In development)
### Breaking changes
### Features
- Add Logs to Monitor (cluster, node)
### Bug fix
- Fixed the error when querying empty metric data (#144)
- Fixed empty host when setup step finishes (#147)
- Fixed the error of obtaining suggestions of field's value in discover (#151)
- Fixed the wrong display of heatmap's data in alerting message (#157)
- Fixed Devtools `_sql` support for elasticsearch 6.x (#158)
- Fixed audit log default sorting across pagination (#161)
- Fixed mapping type conflict error (#164)
- Fixed `Gateway` template config for mTLS (#166)
### Improvements
- Update agent config with cluster name (#148)
- Optimize UI of histogram and datepicker in discover (#151)
- Support viewing logs for cluster, node, index health change events (#150)
- Enhance LDAP authentication logging (#156)
- Optimize UI for copying metric requests (#155)
- Enhance deletion tips by adding cluster info for indices
- Support clearing offline agent instances (#165)
## 1.28.2 (2025-02-15)
### Features
- Support alerts based on bucket diff state (#119)
- Add rollup ilm when use Easysearch (#128)
- Log activity for cluster metric collection mode changes (#152)
### Bug fix
- Fixed missing data when processing multiple time series in a group with insight data API (#127)
- Fixed incorrect node health change activity logging (#154)
### Improvements
- Add Buckets Diff to alerting rule
- Automatically create Agent metrics for system clusters when using Easysearch to store metrics Write least-privileged user (#120)
- Add Copy request to alerting chart
- Add credential settings for agent in enrolling agent
- Add collection mode to cluster editing
- Add default roles to fix the issue (#114) (#130)
- Add agent connection config with `version` and `distribution` to avoid panic at start (#131)
## 1.28.1 (2025-01-24)
### Breaking changes
### Features
- Support function-format parameters in Insight Data API
- Support configuring multiple hosts when creating a cluster
- Provide an API to dump and view the current list of stored Badger keys
- Rollup supports scenario-based condition adaptation, requires Easysearch > 1.9.2
- TopN built-in metric - Index metric (agent collection mode)
- TopN built-in view templates
- TopN supports custom data views
### Bug fix
- Fix the issue of high memory usage in Badger KV storage
### Improvements
- LDAP configuration supports validation with special character "dot" (.) (#46)
## 1.28.0 (2025-01-11)
### Breaking changes
### Features
- Add allocation to activities if is cluster health change and changed to red.
- Add index metrics for segment memory (norms, points, version map, fixed bit set).
- Support querying top N metrics in the Insight Data Query API
- Add insight metric CURD API for managing custom metrics
- Add built-in metrics templates for common use cases
### Bug fix
- Fixed query thread pool metrics when cluster uuid is empty
- Fixed unit tests
### Improvements
- Optimize UI of agent list when its columns are overflow.
- Add loading to each row in overview table.
- Adapter metrics query with cluster id and cluster uuid
- Optimize metric query bucket size (#59)
- Add suggestion to chart in monitor if is no data because the time interval is less than the collection interval.
- Check if the cluster version supports metric transport_outbound_connections in monitor.
- Set timeout to 10s by default in DatePicker's time settings.
- Check if the cluster version supports metric transport_outbound_connections in monitor.
- Enhanced http_client to support customizable configurations.
## 1.27.0 (2024-12-13)
### Improvements
- The code is open source and the [Github repository](https://github.com/infinilabs/console) is used for development
- Split monitoring metric requests to optimize monitoring metric query response speed.
- Optimizing ES metric collecting
- Split monitoring metric requests to optimize monitoring metric query response speed.
- Added timeout setting to general time component
- Cluster selection component adds registration and refresh feature
- Adding metrics collection status
- Optimizing layout of table component
### Bug fix
- Fixed the issue of untimely update of cluster metadata
- Fixed the issue of incorrect links in help documents, etc.
- Fixed node and index metadata ID to prevent metadata record duplication.

View File

@ -31,21 +31,16 @@ asciinema: true
### 配置告警指标以及触发条件
{{% load-img "/img/screenshot/2025/alerting/rule_condition.png" "alerting rule new" %}}
{{% load-img "/img/screenshot/20220715-alerting-rule-new-metric.jpg" "alerting rule new" %}}
- 输入规则名称
- 按需添加分组的字段以及分组大小,可以添加多个,用于 terms 聚合
- 选择指标聚合字段以及统计类型,可以配置多个,当配置多个时必须配置公式用于计算最终的指标
- 配置告警触发条件
- 选择指标数值
- 选择分桶对比
- 选择基于文档差异数或者内容差异数
> 文档差异数:相邻两个时间桶内命中文档数量的差值
> 内容差异数:相邻两个时间桶内某个分组是否有变化,差异值为 1 表示增加,-1 表示减少,0 表示无变化
- 选择执行检查周期
- 输入告警事件标题(模版,被模版变量中的 title 引用,点击这里了解 [模版语法](./variables))
- 输入告警事件消息(模版,被模版变量中的 message 引用,点击这里了解 [模版语法](./variables))
>分桶对比是 INFINI Console 1.28.2 版本新增的功能,可以用于检测不同时间段数据的差异,比如检测某个时间段内的数据量是否有异常变化
### 配置消息通知
{{% load-img "/img/screenshot/20220715-alerting-rule-new-notification.jpg" "alerting rule new" %}}

View File

@ -7,105 +7,11 @@ title: "版本历史"
这里是 INFINI Console 历史版本发布的相关说明。
## Latest (In development)
### Breaking changes
### Features
- 监控(集群、节点)新增日志查询
### Bug fix
- 修复指标数据为空时的查询错误 (#144)
- 修复初始化结束步骤中主机显示为错误的问题 (#147)
- 修复数据探索中获取字段值建议的错误 (#151)
- 修复告警消息热图数据显示错误的问题 (#157)
- 修复开发工具 `_sql` 查询支撑 Elasticsearch 6.x 版本 (#158)
- 修复审计日志默认排序翻页之后丢失的问题 (#161)
- 修复 `Mapping` 冲突问题 (#164)
- 修复配置文件模板中 `Gateway` mTLS 配置 (#166)
### Improvements
- 优化下发给 Agent 的配置,增加集群名称 (#148)
- 优化柱状图和时间选择器的 UI (#151)
- 集群,节点,索引健康状态变更支持查看日志 (#150)
- 增强 LDAP 身份验证的日志记录 (#156)
- 优化监控报表里拷贝指标请求的 UI (#155)
- 删除索引提示增加集群信息 (#162)
## 1.28.2 (2025-02-15)
### Features
- 告警功能支持根据桶之间文档数差值和内容差异告警 (#119)
- 当使用 Easysearch 存储指标时,增加 Rollup 索引生命周期 (#128)
- 增加集群指标采集模式变更事件 (#152)
- 支持清理离线 Agent 实例(#165)
### Bug fix
- 修复 Insight API 处理多时间序列数据时数据丢失的问题 (#127)
- 修复错误的节点健康状态变更事件 (#154)
### Improvements
- 告警图表新增复制请求
- 在注册 Agent 中新增 Agent 凭据设置
- 在集群编辑中新增采集模式
- 当使用 Easysearch 存储指标时,自动为系统集群创建 Agent 指标写入最小权限用户 (#120)
- 优化 LDAP 用户映射增加默认权限组 (#114) (#130)
- 优化 Agent 连接 Easysearch 的配置信息中增加 `version` 和 `distribution` 来解决启动时退出问题 (#131)
## 1.28.1 (2025-01-24)
### Features
- 创建集群时支持配置多个主机地址,增强集群的高可用性
- Insight Data API 支持函数格式查询,方便拓展查询功能
- 提供 API 来 Dump 查看当前存储的 Badger Key 列表
- Rollup 支持场景条件适配,要求 Easysearch > 1.9.2
- TopN 内置指标 - 索引指标(agent 采集模式)
- TopN 内置视图模版
- TopN 支持自定义数据视图
### Bug fix
- 修复 Badger KV 存储内存占用过高的问题
### Improvements
- LDAP 配置支持带特殊符号“点”(.) 验证(#46)
## 1.28.0 (2025-01-11)
### Features
- 在集群健康状态变为红色时,将分配活动记录到动态日志中。
- 为索引增加段内存指标(包括 norms、points、version map、fixed bit set)
- 支持在 Insight 数据查询 API 中查询 Top N 指标。
- 新增 Insight 指标 CURD API用于管理自定义指标。
- 添加多个常见用例的内置指标模板。
### Bug fix
- 修复当集群 UUID 为空时查询线程池指标的问题。
- 修复单元测试中的问题。
### Improvements
- 优化 Agent 列表的 UI当列数据溢出时改善显示效果。
- 在概览表格的每一行添加加载动画。
- 支持通过集群 ID 和集群 UUID 查询指标。
- 优化指标查询的桶大小设置 (#59)。
- 在监控图表中,如果时间间隔小于收集间隔导致无数据显示时,添加提示。
- 检查监控中集群版本是否支持 metric transport_outbound_connections。
- 将 DatePicker 的时间设置默认超时时间调整为 10 秒。
- 检查监控中集群版本是否支持 metric transport_outbound_connections。
- 增强 http_client支持更多自定义配置选项。
## 1.27.0 (2024-12-13)
### Improvements
- 代码开源,统一采用 Github [仓库](https://github.com/infinilabs/console) 进行开发
- 代码开源,统一采用 [Github 仓库](https://github.com/infinilabs/console) 进行开发
- 指标采集优化,由原来的单一协程采集调整为每个注册的集群有单独的协程进行采集
- 指标监控页面图表展示采用懒加载、单个图表独立加载,增强用户体验
- 通用时间控件增加超时时间设置

Binary file not shown.

Before

Width:  |  Height:  |  Size: 115 KiB

View File

@ -159,6 +159,7 @@ func main() {
orm.RegisterSchemaWithIndexName(api3.RemoteConfig{}, "configs")
orm.RegisterSchemaWithIndexName(model.AuditLog{}, "audit-logs")
orm.RegisterSchemaWithIndexName(host.HostInfo{}, "host")
orm.RegisterSchemaWithIndexName(insight.MetricBase{}, "metric")
module.Start()

View File

@ -44,28 +44,6 @@ func (cond *Condition) GetMinimumPeriodMatch() int {
return minPeriodMatch
}
func (cond *Condition) GetMaxBucketCount() int {
var bucketCount = 0
for _, citem := range cond.Items {
if citem.BucketCount > bucketCount {
bucketCount = citem.BucketCount
}
}
return bucketCount
}
// BucketDiffType represents the type of bucket difference
type BucketDiffType string
// Constants defining possible bucket difference types
const (
// BucketDiffTypeSize indicates the difference in bucket size
BucketDiffTypeSize BucketDiffType = "size"
// BucketDiffTypeContent indicates the difference in bucket content
BucketDiffTypeContent BucketDiffType = "content"
)
type ConditionItem struct {
//MetricName string `json:"metric"`
MinimumPeriodMatch int `json:"minimum_period_match"`
@ -73,10 +51,6 @@ type ConditionItem struct {
Values []string `json:"values"`
Priority string `json:"priority"`
Expression string `json:"expression,omitempty"`
//bucket condition type, e.g: size, content
Type BucketDiffType `json:"type,omitempty"`
// Represents the number of buckets in the bucket condition type.
BucketCount int `json:"bucket_count,omitempty"`
}
func (cond *ConditionItem) GenerateConditionExpression() (conditionExpression string, err error) {

View File

@ -81,15 +81,10 @@ type QueryResult struct {
type MetricData struct {
GroupValues []string `json:"group_values"`
Data map[string][]MetricDataItem `json:"data"`
Data map[string][]TimeMetricData `json:"data"`
}
type MetricDataItem struct {
Timestamp interface{} `json:"timestamp,omitempty"`
Value interface{} `json:"value"`
Groups []string `json:"groups,omitempty"`
DocCount int `json:"doc_count,omitempty"`
}
type TimeMetricData []interface{}
type AlertMetricItem struct {
common.MetricItem

View File

@ -54,9 +54,8 @@ type Rule struct {
Name string `json:"name" elastic_mapping:"name: { type: keyword }"`
Id string `json:"id" elastic_mapping:"id: { type: keyword }"`
} `json:"creator" elastic_mapping:"creator:{type:object}"`
Category string `json:"category,omitempty" elastic_mapping:"category: { type: keyword,copy_to:search_text }"`
Tags []string `json:"tags,omitempty" elastic_mapping:"tags: { type: keyword,copy_to:search_text }"`
BucketConditions *Condition `json:"bucket_conditions" elastic_mapping:"bucket_conditions:{type:object}"`
Category string `json:"category,omitempty" elastic_mapping:"category: { type: keyword,copy_to:search_text }"`
Tags []string `json:"tags,omitempty" elastic_mapping:"tags: { type: keyword,copy_to:search_text }"`
}
func (rule *Rule) GetOrInitExpression() (string, error) {

View File

@ -31,6 +31,7 @@ import (
"fmt"
"regexp"
"infini.sh/framework/core/orm"
"infini.sh/framework/core/util"
)
@ -79,6 +80,25 @@ type Metric struct {
Unit string `json:"unit,omitempty"`
}
type MetricBase struct {
orm.ORMObjectBase
//display name of the metric
Name string `json:"name"`
//metric identifier
Key string `json:"key"`
//optional values : "node", "indices", "shard"
Level string `json:"level"`
//metric calculation formula
Formula string `json:"formula,omitempty"`
Items []MetricItem `json:"items"`
FormatType string `json:"format,omitempty"`
Unit string `json:"unit,omitempty"`
//determine if this metric is built-in
Builtin bool `json:"builtin"`
//array of supported calculation statistic, eg: "avg", "sum", "min", "max"
Statistics []string `json:"statistics,omitempty"`
}
type GroupSort struct {
Key string `json:"key"`
Direction string `json:"direction"`

View File

@ -31,11 +31,6 @@ import (
"context"
"errors"
"fmt"
"net/http"
"runtime"
"sync/atomic"
"time"
"github.com/buger/jsonparser"
log "github.com/cihub/seelog"
"infini.sh/console/plugin/managed/server"
@ -48,6 +43,10 @@ import (
"infini.sh/framework/modules/elastic/adapter"
"infini.sh/framework/modules/elastic/common"
"infini.sh/framework/modules/elastic/metadata"
"net/http"
"runtime"
"sync/atomic"
"time"
)
// node -> binding item
@ -108,7 +107,7 @@ func refreshNodesInfo(instanceID, instanceEndpoint string) (*elastic.DiscoveryRe
return nil, fmt.Errorf("error on get binding nodes info: %w", err)
}
ctxTimeout, cancel := context.WithTimeout(context.Background(), time.Second*30)
ctxTimeout, cancel := context.WithTimeout(context.Background(), time.Second*10)
defer cancel()
nodesInfo, err := GetElasticsearchNodesViaAgent(ctxTimeout, instanceEndpoint)
if err != nil {
@ -604,11 +603,6 @@ func (h *APIHandler) bindInstanceToCluster(clusterInfo ClusterInfo, nodes *elast
if util.ContainStr(ip, "::") {
ip = fmt.Sprintf("[%s]", ip)
}
if util.ContainStr(ip, "*") {
ip = util.LocalAddress
}
nodeHost := fmt.Sprintf("%s:%d", ip, port)
nodeInfo := h.internalProcessBind(clusterID, clusterUUID, instanceID, instanceEndpoint, pid, nodeHost, auth)
if nodeInfo != nil {

View File

@ -197,32 +197,22 @@ func getAgentIngestConfigs(instance string, items map[string]BindingItem) (strin
var username = ""
var password = ""
var version = ""
var distribution = ""
var clusterName = ""
if metadata.Config != nil {
version = metadata.Config.Version
distribution = metadata.Config.Distribution
clusterName = metadata.Config.Name
if metadata.Config.AgentCredentialID != "" {
credential, err := common2.GetCredential(metadata.Config.AgentCredentialID)
if err != nil {
log.Error(err)
continue
}
var dv interface{}
dv, err = credential.Decode()
if err != nil {
log.Error(err)
continue
}
if auth, ok := dv.(model.BasicAuth); ok {
username = auth.Username
password = auth.Password.Get()
}
if metadata.Config.AgentCredentialID != "" {
credential, err := common2.GetCredential(metadata.Config.AgentCredentialID)
if err != nil {
log.Error(err)
continue
}
var dv interface{}
dv, err = credential.Decode()
if err != nil {
log.Error(err)
continue
}
if auth, ok := dv.(model.BasicAuth); ok {
username = auth.Username
password = auth.Password.Get()
}
}
@ -248,22 +238,20 @@ func getAgentIngestConfigs(instance string, items map[string]BindingItem) (strin
}
taskID := v.ClusterID + "_" + v.NodeUUID
buffer.Write([]byte(fmt.Sprintf("\n - name: \"%v\"\n path: ./config/task_config.tpl\n "+
"variable:\n "+
"TASK_ID: %v\n "+
"CLUSTER_ID: %v\n "+
"CLUSTER_NAME: %v\n "+
"CLUSTER_UUID: %v\n "+
"NODE_UUID: %v\n "+
"CLUSTER_VERSION: %v\n "+
"CLUSTER_DISTRIBUTION: %v\n "+
"CLUSTER_ENDPOINT: [\"%v\"]\n "+
"CLUSTER_USERNAME: \"%v\"\n "+
"CLUSTER_PASSWORD: \"%v\"\n "+
"CLUSTER_LEVEL_TASKS_ENABLED: %v\n "+
"NODE_LEVEL_TASKS_ENABLED: %v\n "+
"NODE_LOGS_PATH: \"%v\"\n\n\n", taskID, taskID,
v.ClusterID, clusterName, v.ClusterUUID, v.NodeUUID, version, distribution, nodeEndPoint, username, password, clusterLevelEnabled, nodeLevelEnabled, pathLogs)))
v.ClusterID, v.ClusterUUID, v.NodeUUID, nodeEndPoint, username, password, clusterLevelEnabled, nodeLevelEnabled, pathLogs)))
}
hash := util.MD5digest(buffer.String())

View File

@ -27,7 +27,6 @@ import (
"context"
"encoding/json"
"fmt"
"infini.sh/framework/core/queue"
"math"
"net/http"
"strconv"
@ -108,9 +107,6 @@ func (h *APIHandler) HandleCreateClusterAction(w http.ResponseWriter, req *http.
if conf.Distribution == "" {
conf.Distribution = elastic.Elasticsearch
}
if conf.MetricCollectionMode == "" {
conf.MetricCollectionMode = elastic.ModeAgentless
}
err = orm.Create(ctx, conf)
if err != nil {
log.Error(err)
@ -187,7 +183,6 @@ func (h *APIHandler) HandleUpdateClusterAction(w http.ResponseWriter, req *http.
h.Error404(w)
return
}
var oldCollectionMode = originConf.MetricCollectionMode
buf := util.MustToJSONBytes(originConf)
source := map[string]interface{}{}
util.MustFromJSONBytes(buf, &source)
@ -260,10 +255,7 @@ func (h *APIHandler) HandleUpdateClusterAction(w http.ResponseWriter, req *http.
h.WriteError(w, err.Error(), http.StatusInternalServerError)
return
}
// record cluster metric collection mode change activity
if oldCollectionMode != newConf.MetricCollectionMode {
recordCollectionModeChangeActivity(newConf.ID, newConf.Name, oldCollectionMode, newConf.MetricCollectionMode)
}
basicAuth, err := common.GetBasicAuth(newConf)
if err != nil {
h.WriteError(w, err.Error(), http.StatusInternalServerError)
@ -281,47 +273,6 @@ func (h *APIHandler) HandleUpdateClusterAction(w http.ResponseWriter, req *http.
h.WriteUpdatedOKJSON(w, id)
}
func recordCollectionModeChangeActivity(clusterID, clusterName, oldMode, newMode string) {
activityInfo := &event.Activity{
ID: util.GetUUID(),
Timestamp: time.Now(),
Metadata: event.ActivityMetadata{
Category: "elasticsearch",
Group: "platform",
Name: "metric_collection_mode_change",
Type: "update",
Labels: util.MapStr{
"cluster_id": clusterID,
"cluster_name": clusterName,
"from": oldMode,
"to": newMode,
},
},
}
queueConfig := queue.GetOrInitConfig("platform##activities")
if queueConfig.Labels == nil {
queueConfig.ReplaceLabels(util.MapStr{
"type": "platform",
"name": "activity",
"category": "elasticsearch",
"activity": true,
})
}
err := queue.Push(queueConfig, util.MustToJSONBytes(event.Event{
Timestamp: time.Now(),
Metadata: event.EventMetadata{
Category: "elasticsearch",
Name: "activity",
},
Fields: util.MapStr{
"activity": activityInfo,
}}))
if err != nil {
log.Error(err)
}
}
func (h *APIHandler) HandleDeleteClusterAction(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
resBody := map[string]interface{}{}
id := ps.MustGetParameter("id")

View File

@ -37,11 +37,8 @@ func GetMonitorState(clusterID string) string {
if conf == nil {
panic(fmt.Errorf("config of cluster [%s] is not found", clusterID))
}
if conf.MetricCollectionMode == "" {
if conf.MonitorConfigs != nil && !conf.MonitorConfigs.NodeStats.Enabled && !conf.MonitorConfigs.IndexStats.Enabled {
return elastic.ModeAgent
}
return elastic.ModeAgentless
if conf.MonitorConfigs != nil && !conf.MonitorConfigs.NodeStats.Enabled && !conf.MonitorConfigs.IndexStats.Enabled {
return elastic.ModeAgent
}
return conf.MetricCollectionMode
return elastic.ModeAgentless
}

View File

@ -80,7 +80,6 @@ func (h *APIHandler) HandleProxyAction(w http.ResponseWriter, req *http.Request,
}
if strings.Trim(newURL.Path, "/") == "_sql" {
distribution := esClient.GetVersion().Distribution
version := esClient.GetVersion().Number
indexName, err := rewriteTableNamesOfSqlRequest(req, distribution)
if err != nil {
h.WriteError(w, err.Error(), http.StatusInternalServerError)
@ -93,15 +92,6 @@ func (h *APIHandler) HandleProxyAction(w http.ResponseWriter, req *http.Request,
q, _ := url.ParseQuery(newURL.RawQuery)
hasFormat := q.Has("format")
switch distribution {
case elastic.Elasticsearch:
if !hasFormat {
q.Add("format", "txt")
}
if large, _ := util.VersionCompare(version, "7.0.0"); large > 0 {
path = "_sql?" + q.Encode()
} else {
path = "_xpack/_sql?" + q.Encode()
}
case elastic.Opensearch:
path = "_plugins/_sql?format=raw"
case elastic.Easysearch:

View File

@ -48,8 +48,7 @@ type LDAPConfig struct {
UidAttribute string `config:"uid_attribute"`
GroupAttribute string `config:"group_attribute"`
DefaultRoles []string `config:"default_roles"`
RoleMapping struct {
RoleMapping struct {
Group map[string][]string `config:"group"`
Uid map[string][]string `config:"uid"`
} `config:"role_mapping"`
@ -82,9 +81,6 @@ func (r *LDAPRealm) mapLDAPRoles(authInfo auth.Info) []string {
}
//map group
if len(authInfo.GetGroups()) == 0 {
log.Debugf("LDAP uid: %v, user: %v, group: %v", uid, authInfo, authInfo.GetGroups())
}
for _, roleName := range authInfo.GetGroups() {
newRoles, ok := r.config.RoleMapping.Group[roleName]
if ok {
@ -97,9 +93,6 @@ func (r *LDAPRealm) mapLDAPRoles(authInfo auth.Info) []string {
}
}
//auto append default roles
ret = append(ret, r.config.DefaultRoles...)
return ret
}

View File

@ -77,9 +77,9 @@ func Init(config *config.Config) {
func Authenticate(username, password string) (bool, *rbac.User, error) {
for _, realm := range realms {
for i, realm := range realms {
ok, user, err := realm.Authenticate(username, password)
log.Debugf("authenticate result: %v, user: %v, err: %v, realm: %v", ok, user, err, realm.GetType())
log.Debugf("authenticate result: %v, user: %v, err: %v, realm: %v", ok, user, err, i)
if ok && user != nil && err == nil {
return true, user, nil
}
@ -92,14 +92,14 @@ func Authenticate(username, password string) (bool, *rbac.User, error) {
func Authorize(user *rbac.User) (bool, error) {
for _, realm := range realms {
for i, realm := range realms {
//skip if not the same auth provider, TODO: support cross-provider authorization
if user.AuthProvider != realm.GetType() {
continue
}
ok, err := realm.Authorize(user)
log.Debugf("authorize result: %v, user: %v, err: %v, realm: %v", ok, user, err, realm.GetType())
log.Debugf("authorize result: %v, user: %v, err: %v, realm: %v", ok, user, err, i)
if ok && err == nil {
//return on any success, TODO, maybe merge all roles and privileges from all realms
return true, nil

View File

@ -404,23 +404,13 @@ func (h *AlertAPI) getAlertMessage(w http.ResponseWriter, req *http.Request, ps
return
}
metricExpression, _ := rule.Metrics.GenerateExpression()
var (
hitCondition string
bucketDiffType string
)
conditions := rule.Conditions
if rule.BucketConditions != nil {
conditions = *rule.BucketConditions
}
for i, cond := range conditions.Items {
var hitCondition string
for i, cond := range rule.Conditions.Items {
expression, _ := cond.GenerateConditionExpression()
if cond.Priority == message.Priority {
hitCondition = strings.ReplaceAll(expression, "result", "")
if rule.BucketConditions != nil {
bucketDiffType = string(cond.Type)
}
}
conditions.Items[i].Expression = strings.ReplaceAll(expression, "result", metricExpression)
rule.Conditions.Items[i].Expression = strings.ReplaceAll(expression, "result", metricExpression)
}
var duration time.Duration
if message.Status == alerting.MessageStateRecovered {
@ -429,28 +419,26 @@ func (h *AlertAPI) getAlertMessage(w http.ResponseWriter, req *http.Request, ps
duration = time.Now().Sub(message.Created)
}
detailObj := util.MapStr{
"message_id": message.ID,
"rule_id": message.RuleID,
"rule_name": rule.Name,
"rule_enabled": rule.Enabled,
"title": message.Title,
"message": message.Message,
"priority": message.Priority,
"created": message.Created,
"updated": message.Updated,
"resource_name": rule.Resource.Name,
"resource_id": rule.Resource.ID,
"resource_objects": rule.Resource.Objects,
"conditions": rule.Conditions,
"bucket_conditions": rule.BucketConditions,
"bucket_diff_type": bucketDiffType,
"duration": duration.Milliseconds(),
"ignored_time": message.IgnoredTime,
"ignored_reason": message.IgnoredReason,
"ignored_user": message.IgnoredUser,
"status": message.Status,
"expression": rule.Metrics.Expression,
"hit_condition": hitCondition,
"message_id": message.ID,
"rule_id": message.RuleID,
"rule_name": rule.Name,
"rule_enabled": rule.Enabled,
"title": message.Title,
"message": message.Message,
"priority": message.Priority,
"created": message.Created,
"updated": message.Updated,
"resource_name": rule.Resource.Name,
"resource_id": rule.Resource.ID,
"resource_objects": rule.Resource.Objects,
"conditions": rule.Conditions,
"duration": duration.Milliseconds(),
"ignored_time": message.IgnoredTime,
"ignored_reason": message.IgnoredReason,
"ignored_user": message.IgnoredUser,
"status": message.Status,
"expression": rule.Metrics.Expression,
"hit_condition": hitCondition,
}
h.WriteJSON(w, detailObj, http.StatusOK)
}

View File

@ -201,13 +201,9 @@ func (alertAPI *AlertAPI) getRuleDetail(w http.ResponseWriter, req *http.Request
return
}
metricExpression, _ := obj.Metrics.GenerateExpression()
conditions := obj.Conditions
if obj.BucketConditions != nil {
conditions = *obj.BucketConditions
}
for i, cond := range conditions.Items {
for i, cond := range obj.Conditions.Items {
expression, _ := cond.GenerateConditionExpression()
conditions.Items[i].Expression = strings.ReplaceAll(expression, "result", metricExpression)
obj.Conditions.Items[i].Expression = strings.ReplaceAll(expression, "result", metricExpression)
}
alertNumbers, err := alertAPI.getRuleAlertMessageNumbers([]string{obj.ID})
if err != nil {
@ -333,7 +329,6 @@ func (alertAPI *AlertAPI) getRuleDetail(w http.ResponseWriter, req *http.Request
"bucket_size": obj.Metrics.BucketSize, //统计周期
"updated": obj.Updated,
"conditions": obj.Conditions,
"bucket_conditions": obj.BucketConditions,
"message_count": alertNumbers[obj.ID], //所有关联告警消息数(包括已恢复的)
"state": state,
"enabled": obj.Enabled,

View File

@ -56,4 +56,7 @@ func InitAPI() {
api.HandleAPIMethod(api.POST, "/elasticsearch/:id/map_label/_render", insight.renderMapLabelTemplate)
api.HandleAPIMethod(api.GET, "/insight/widget/:widget_id", insight.getWidget)
api.HandleAPIMethod(api.POST, "/insight/widget", insight.RequireLogin(insight.createWidget))
api.HandleAPIMethod(api.POST, "/insight/metric", insight.createMetric)
api.HandleAPIMethod(api.PUT, "/insight/metric/:metric_id", insight.updateMetric)
api.HandleAPIMethod(api.DELETE, "/insight/metric/:metric_id", insight.deleteMetric)
}

View File

@ -296,11 +296,7 @@ func getMetricData(metric *insight.Metric) (interface{}, error) {
Groups: md.Groups,
Data: map[string][]insight.MetricDataItem{},
}
//merge metric data by timestamp
var timeMetricData = map[interface{}]*insight.MetricDataItem{}
//non time series data
grpMetricData := &insight.MetricDataItem{}
isTimeSeries := false
retMetricDataItem := insight.MetricDataItem{}
for _, formula = range metric.Formulas {
tpl, err := template.New("insight_formula").Parse(formula)
if err != nil {
@ -351,19 +347,6 @@ func getMetricData(metric *insight.Metric) (interface{}, error) {
continue
}
}
var retMetricDataItem *insight.MetricDataItem
//time series data
if timestamp != nil {
isTimeSeries = true
if v, ok := timeMetricData[timestamp]; !ok {
retMetricDataItem = &insight.MetricDataItem{}
} else {
retMetricDataItem = v
}
} else {
//non time series data
retMetricDataItem = grpMetricData
}
retMetricDataItem.Timestamp = timestamp
if len(metric.Formulas) <= 1 && metric.Formula != "" {
//support older versions by returning the result for a single formula.
@ -375,18 +358,9 @@ func getMetricData(metric *insight.Metric) (interface{}, error) {
retMetricDataItem.Value = map[string]interface{}{formula: result}
}
}
if timestamp != nil {
timeMetricData[timestamp] = retMetricDataItem
}
}
}
if !isTimeSeries {
targetData.Data["result"] = append(targetData.Data["result"], *grpMetricData)
} else {
for _, v := range timeMetricData {
targetData.Data["result"] = append(targetData.Data["result"], *v)
}
}
targetData.Data["result"] = append(targetData.Data["result"], retMetricDataItem)
targetMetricData = append(targetMetricData, targetData)
}
}

View File

@ -0,0 +1,166 @@
// Copyright (C) INFINI Labs & INFINI LIMITED.
//
// The INFINI Console is offered under the GNU Affero General Public License v3.0
// and as commercial software.
//
// For commercial licensing, contact us at:
// - Website: infinilabs.com
// - Email: hello@infini.ltd
//
// Open Source licensed under AGPL V3:
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
/* Copyright © INFINI Ltd. All rights reserved.
* web: https://infinilabs.com
* mail: hello#infini.ltd */
package insight
import (
"errors"
log "github.com/cihub/seelog"
"infini.sh/console/model/insight"
httprouter "infini.sh/framework/core/api/router"
"infini.sh/framework/core/orm"
"infini.sh/framework/core/util"
"infini.sh/framework/modules/elastic"
"net/http"
)
func (h *InsightAPI) createMetric(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
var obj = &insight.MetricBase{}
err := h.DecodeJSON(req, obj)
if err != nil {
h.WriteError(w, err.Error(), http.StatusInternalServerError)
log.Error(err)
return
}
err = orm.Create(nil, obj)
if err != nil {
h.WriteError(w, err.Error(), http.StatusInternalServerError)
log.Error(err)
return
}
h.WriteJSON(w, util.MapStr{
"_id": obj.ID,
"result": "created",
}, 200)
}
func (h *InsightAPI) getMetric(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
id := ps.MustGetParameter("metric_id")
obj := insight.MetricBase{}
obj.ID = id
_, err := orm.Get(&obj)
if err != nil {
if errors.Is(err, elastic.ErrNotFound) {
h.WriteJSON(w, util.MapStr{
"_id": id,
"found": false,
}, http.StatusNotFound)
return
}
h.WriteError(w, err.Error(), http.StatusInternalServerError)
return
}
h.WriteJSON(w, util.MapStr{
"found": true,
"_id": id,
"_source": obj,
}, 200)
}
func (h *InsightAPI) updateMetric(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
id := ps.MustGetParameter("metric_id")
obj := insight.MetricBase{}
obj.ID = id
_, err := orm.Get(&obj)
if err != nil {
if errors.Is(err, elastic.ErrNotFound) {
h.WriteJSON(w, util.MapStr{
"_id": id,
"found": false,
}, http.StatusNotFound)
return
}
h.WriteError(w, err.Error(), http.StatusInternalServerError)
return
}
id = obj.ID
create := obj.Created
obj = insight.MetricBase{}
err = h.DecodeJSON(req, &obj)
if err != nil {
h.WriteError(w, err.Error(), http.StatusInternalServerError)
log.Error(err)
return
}
//protect
obj.ID = id
obj.Created = create
err = orm.Update(nil, &obj)
if err != nil {
h.WriteError(w, err.Error(), http.StatusInternalServerError)
log.Error(err)
return
}
h.WriteJSON(w, util.MapStr{
"_id": obj.ID,
"result": "updated",
}, 200)
}
func (h *InsightAPI) deleteMetric(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
id := ps.MustGetParameter("metric_id")
obj := insight.MetricBase{}
obj.ID = id
_, err := orm.Get(&obj)
if err != nil {
if errors.Is(err, elastic.ErrNotFound) {
h.WriteJSON(w, util.MapStr{
"_id": id,
"found": false,
}, http.StatusNotFound)
return
}
h.WriteError(w, err.Error(), http.StatusInternalServerError)
return
}
if obj.Builtin {
h.WriteError(w, "cannot delete builtin metrics", http.StatusBadRequest)
return
}
err = orm.Delete(nil, &obj)
if err != nil {
h.WriteError(w, err.Error(), http.StatusInternalServerError)
log.Error(err)
return
}
h.WriteJSON(w, util.MapStr{
"_id": obj.ID,
"result": "deleted",
}, 200)
}

View File

@ -31,6 +31,7 @@ import (
"infini.sh/console/core/security/enum"
consoleModel "infini.sh/console/model"
"infini.sh/console/model/alerting"
"infini.sh/console/model/insight"
"infini.sh/framework/core/elastic"
"infini.sh/framework/core/event"
"infini.sh/framework/core/model"
@ -211,6 +212,10 @@ func GetCollectionMetas() map[string]CollectionMeta {
},
MatchObject: &alerting.Rule{},
},
"metric": {
Name: "metric",
MatchObject: &insight.MetricBase{},
},
}
})
return collectionMetas

View File

@ -132,7 +132,7 @@ func (processor *MetadataProcessor) HandleUnknownNodeStatus(ev []byte) error {
}
esClient := elastic.GetClient(processor.config.Elasticsearch)
queryDslTpl := `{"script": {
"source": "ctx._source.metadata.labels.status='unknown'",
"source": "ctx._source.metadata.labels.status='unavailable'",
"lang": "painless"
},
"query": {

View File

@ -30,9 +30,6 @@ package server
import (
"context"
"fmt"
"infini.sh/framework/core/event"
"infini.sh/framework/core/global"
"infini.sh/framework/core/task"
"net/http"
"strconv"
"strings"
@ -79,8 +76,6 @@ func init() {
//try to connect to instance
api.HandleAPIMethod(api.POST, "/instance/try_connect", handler.RequireLogin(handler.tryConnect))
//clear instance that is not alive in 7 days
api.HandleAPIMethod(api.POST, "/instance/_clear", handler.RequirePermission(handler.clearInstance, enum.PermissionGatewayInstanceWrite))
}
@ -90,20 +85,18 @@ func (h APIHandler) registerInstance(w http.ResponseWriter, req *http.Request, p
err := h.DecodeJSON(req, obj)
if err != nil {
h.WriteError(w, err.Error(), http.StatusInternalServerError)
return
}
if obj.Endpoint == "" {
h.WriteError(w, "empty endpoint", http.StatusInternalServerError)
return
}
oldInst := &model.Instance{}
oldInst.ID = obj.ID
exists, err := orm.Get(oldInst)
if exists {
obj.Created = oldInst.Created
errMsg := fmt.Sprintf("agent [%s] already exists", obj.ID)
h.WriteError(w, errMsg, http.StatusInternalServerError)
return
}
err = orm.Save(nil, obj)
err = orm.Create(nil, obj)
if err != nil {
h.WriteError(w, err.Error(), http.StatusInternalServerError)
return
@ -376,168 +369,6 @@ func (h *APIHandler) getInstanceStatus(w http.ResponseWriter, req *http.Request,
}
h.WriteJSON(w, result, http.StatusOK)
}
func (h *APIHandler) clearInstance(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
appName := h.GetParameterOrDefault(req, "app_name", "")
task.RunWithinGroup("clear_instance", func(ctx context.Context) error {
err := h.clearInstanceByAppName(appName)
if err != nil {
log.Error(err)
}
return err
})
h.WriteAckOKJSON(w)
}
func (h *APIHandler) clearInstanceByAppName(appName string) error {
var (
size = 100
from = 0
)
// Paginated query for all running instances
q := orm.Query{
Size: size,
From: from,
}
if appName != "" {
q.Conds = orm.And(
orm.Eq("application.name", appName),
)
}
q.AddSort("created", orm.ASC)
insts := []model.Instance{}
var (
instanceIDs []string
toRemoveIDs []string
instsCache = map[string]*model.Instance{}
)
client := elastic2.GetClient(global.MustLookupString(elastic2.GlobalSystemElasticsearchID))
for {
err, _ := orm.SearchWithJSONMapper(&insts, &q)
if err != nil {
return err
}
for _, inst := range insts {
instanceIDs = append(instanceIDs, inst.ID)
instsCache[inst.ID] = &inst
}
if len(instanceIDs) == 0 {
break
}
aliveInstanceIDs, err := getAliveInstanceIDs(client, instanceIDs)
if err != nil {
return err
}
for _, instanceID := range instanceIDs {
if _, ok := aliveInstanceIDs[instanceID]; !ok {
toRemoveIDs = append(toRemoveIDs, instanceID)
}
}
if len(toRemoveIDs) > 0 {
// Use the same slice to avoid extra allocation
filteredIDs := toRemoveIDs[:0]
// check whether the instance is still online
for _, instanceID := range toRemoveIDs {
if inst, ok := instsCache[instanceID]; ok {
_, err = h.getInstanceInfo(inst.Endpoint, inst.BasicAuth)
if err == nil {
// Skip online instance, do not append to filtered list
continue
}
}
// Keep only offline instances
filteredIDs = append(filteredIDs, instanceID)
}
// Assign back after filtering
toRemoveIDs = filteredIDs
query := util.MapStr{
"query": util.MapStr{
"terms": util.MapStr{
"id": toRemoveIDs,
},
},
}
// remove instances
err = orm.DeleteBy(model.Instance{}, util.MustToJSONBytes(query))
if err != nil {
return fmt.Errorf("failed to delete instance: %w", err)
}
// remove instance related data
query = util.MapStr{
"query": util.MapStr{
"terms": util.MapStr{
"metadata.labels.agent_id": toRemoveIDs,
},
},
}
err = orm.DeleteBy(model.Setting{}, util.MustToJSONBytes(query))
}
// Exit loop when the number of returned records is less than the page size
if len(insts) <= size {
break
}
// Reset instance state for the next iteration
insts = []model.Instance{}
toRemoveIDs = nil
instsCache = make(map[string]*model.Instance)
q.From += size
}
return nil
}
func getAliveInstanceIDs(client elastic2.API, instanceIDs []string) (map[string]struct{}, error) {
query := util.MapStr{
"size": 0,
"query": util.MapStr{
"bool": util.MapStr{
"must": []util.MapStr{
{
"terms": util.MapStr{
"agent.id": instanceIDs,
},
},
{
"range": util.MapStr{
"timestamp": util.MapStr{
"gt": "now-7d",
},
},
},
},
},
},
"aggs": util.MapStr{
"grp_agent_id": util.MapStr{
"terms": util.MapStr{
"field": "agent.id",
},
"aggs": util.MapStr{
"count": util.MapStr{
"value_count": util.MapStr{
"field": "agent.id",
},
},
},
},
},
}
queryDSL := util.MustToJSONBytes(query)
ctx, cancel := context.WithTimeout(context.Background(), time.Second*10)
defer cancel()
response, err := client.QueryDSL(ctx, orm.GetWildcardIndexName(event.Event{}), nil, queryDSL)
if err != nil {
return nil, err
}
ret := map[string]struct{}{}
for _, bk := range response.Aggregations["grp_agent_id"].Buckets {
key := bk["key"].(string)
if bk["doc_count"].(float64) > 0 {
ret[key] = struct{}{}
}
}
return ret, nil
}
func (h *APIHandler) proxy(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
var (
@ -586,7 +417,7 @@ func (h *APIHandler) getInstanceInfo(endpoint string, basicAuth *model.BasicAuth
obj := &model.Instance{}
_, err := ProxyAgentRequest("runtime", endpoint, req1, obj)
if err != nil {
return nil, err
panic(err)
}
return obj, err

View File

@ -69,11 +69,6 @@ import (
"infini.sh/framework/plugins/replay"
)
// Easysearch auto create ingest user password
const ingestUser = "infini_ingest"
var ingestPassword = util.GenerateRandomString(20)
type Module struct {
api.Handler
}
@ -497,51 +492,36 @@ func (module *Module) initialize(w http.ResponseWriter, r *http.Request, ps http
if reuseOldCred {
toSaveCfg.CredentialID = oldCfg.CredentialID
} else {
credId := createCred("INFINI_SYSTEM", request.Cluster.Username, request.Cluster.Password)
cfg.CredentialID = credId
toSaveCfg.CredentialID = credId
cred := credential.Credential{
Name: "INFINI_SYSTEM",
Type: credential.BasicAuth,
Tags: []string{"infini", "system"},
Payload: map[string]interface{}{
"basic_auth": map[string]interface{}{
"username": request.Cluster.Username,
"password": request.Cluster.Password,
},
},
}
cred.ID = util.GetUUID()
err = cred.Encode()
if err != nil {
panic(err)
}
toSaveCfg.CredentialID = cred.ID
cfg.CredentialID = cred.ID
now := time.Now()
cred.Created = &now
err = orm.Save(nil, &cred)
if err != nil {
panic(err)
}
toSaveCfg.BasicAuth = nil
}
}
//保存默认集群
t := time.Now()
toSaveCfg.MetadataConfigs = &elastic.MetadataConfig{
HealthCheck: elastic.TaskConfig{
Enabled: true,
Interval: "10s",
},
ClusterSettingsCheck: elastic.TaskConfig{
Enabled: true,
Interval: "10s",
},
MetadataRefresh: elastic.TaskConfig{
Enabled: true,
Interval: "10s",
},
NodeAvailabilityCheck: elastic.TaskConfig{
Enabled: true,
Interval: "10s",
},
}
toSaveCfg.MonitorConfigs = &elastic.MonitorConfig{
ClusterStats: elastic.TaskConfig{
Enabled: true,
Interval: "10s",
},
NodeStats: elastic.TaskConfig{
Enabled: true,
Interval: "10s",
},
ClusterHealth: elastic.TaskConfig{
Enabled: true,
Interval: "10s",
},
IndexStats: elastic.TaskConfig{
Enabled: true,
Interval: "10s",
},
}
toSaveCfg.Created = &t
err = orm.Save(nil, &toSaveCfg)
if err != nil {
@ -618,7 +598,6 @@ func (module *Module) initialize(w http.ResponseWriter, r *http.Request, ps http
success = true
}
func (module *Module) validateSecret(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
err, client, request := module.initTempClient(r)
if err != nil {
@ -680,34 +659,6 @@ func validateCredentialSecret(ormHandler orm.ORM, credentialSecret string) (bool
return exists, nil
}
func createCred(name, username, password string) string {
cred := credential.Credential{
Name: name,
Type: credential.BasicAuth,
Tags: []string{"infini", "system"},
Payload: map[string]interface{}{
"basic_auth": map[string]interface{}{
"username": username,
"password": password,
},
},
}
cred.ID = util.GetUUID()
err := cred.Encode()
if err != nil {
panic(err)
}
now := time.Now()
cred.Created = &now
cred.Updated = &now
err = orm.Save(nil, &cred)
if err != nil {
panic(err)
}
return cred.ID
}
func getYamlData(filename string) []byte {
baseDir := path.Join(global.Env().GetConfigDir(), "setup")
filePath := path.Join(baseDir, "common", "data", filename)
@ -722,7 +673,6 @@ func getYamlData(filename string) []byte {
escapedContent = bytes.ReplaceAll(escapedContent, []byte("\""), []byte("\\\""))
return escapedContent
}
func (module *Module) initializeTemplate(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
if !global.Env().SetupRequired() {
module.WriteError(w, "setup not permitted", 500)
@ -758,7 +708,7 @@ func (module *Module) initializeTemplate(w http.ResponseWriter, r *http.Request,
elastic2.InitTemplate(true)
case "rollup":
if ver.Distribution == elastic.Easysearch {
if large, _ := util.VersionCompare(ver.Number, "1.10.1"); large > 0 {
if large, _ := util.VersionCompare(ver.Number, "1.10.0"); large > 0 {
useCommon = false
dslTplFileName = "template_rollup.tpl"
}
@ -770,17 +720,6 @@ func (module *Module) initializeTemplate(w http.ResponseWriter, r *http.Request,
case "view":
dslTplFileName = "view.tpl"
case "agent":
if ver.Distribution == elastic.Easysearch {
err = keystore.SetValue("SYSTEM_CLUSTER_INGEST_PASSWORD", []byte(ingestPassword))
if err != nil {
panic(err)
}
client := elastic.GetClient(GlobalSystemElasticsearchID)
err = initIngestUser(client, cfg1.IndexPrefix, ingestUser, ingestPassword)
if err != nil {
panic(err)
}
}
dslTplFileName = "agent.tpl"
default:
panic(fmt.Sprintf("unsupport template name [%s]", request.InitializeTemplate))
@ -836,7 +775,6 @@ func (module *Module) initializeTemplate(w http.ResponseWriter, r *http.Request,
}, http.StatusOK)
return
}
output := tpl.ExecuteFuncString(func(w io.Writer, tag string) (int, error) {
switch tag {
case "SETUP_SYSTEM_INGEST_CONFIG":
@ -857,18 +795,6 @@ func (module *Module) initializeTemplate(w http.ResponseWriter, r *http.Request,
return w.Write([]byte(request.Cluster.Username))
case "SETUP_ES_PASSWORD":
return w.Write([]byte(request.Cluster.Password))
case "SETUP_AGENT_USERNAME":
if ver.Distribution == elastic.Easysearch {
return w.Write([]byte(ingestUser))
} else {
return w.Write([]byte(request.Cluster.Username))
}
case "SETUP_AGENT_PASSWORD":
if ver.Distribution == elastic.Easysearch {
return w.Write([]byte(ingestPassword))
} else {
return w.Write([]byte(request.Cluster.Password))
}
case "SETUP_SCHEME":
return w.Write([]byte(strings.Split(request.Cluster.Endpoint, "://")[0]))
case "SETUP_ENDPOINTS":
@ -941,41 +867,3 @@ func (module *Module) initializeTemplate(w http.ResponseWriter, r *http.Request,
}, http.StatusOK)
}
func initIngestUser(client elastic.API, indexPrefix string, username, password string) error {
roleTpl := `{
"cluster": [
"cluster_monitor",
"cluster_composite_ops"
],
"description": "Provide the minimum permissions for INFINI AGENT to write metrics and logs",
"indices": [{
"names": [
"%slogs*", "%smetrics*"
],
"query": "",
"field_security": [],
"field_mask": [],
"privileges": [
"create_index","index","manage_aliases","write"
]
}]
}`
roleBody := fmt.Sprintf(roleTpl, indexPrefix, indexPrefix)
err := client.PutRole(username, []byte(roleBody))
if err != nil {
return fmt.Errorf("failed to create ingest role: %w", err)
}
userTpl := `{
"roles": [
"%s"
],
"password": "%s"}`
userBody := fmt.Sprintf(userTpl, username, password)
err = client.PutUser(username, []byte(userBody))
if err != nil {
return fmt.Errorf("failed to create ingest user: %w", err)
}
return nil
}

View File

@ -329,16 +329,9 @@ func getQueryTimeRange(rule *alerting.Rule, filterParam *alerting.FilterParam) (
} else {
return nil, fmt.Errorf("period interval: %s is too small", rule.Metrics.BucketSize)
}
var bucketCount int
if rule.BucketConditions != nil {
bucketCount = rule.BucketConditions.GetMaxBucketCount()
//for removing first and last time bucket
bucketCount += 2
} else {
bucketCount = rule.Conditions.GetMinimumPeriodMatch() + 1
}
bucketCount := rule.Conditions.GetMinimumPeriodMatch() + 1
if bucketCount <= 0 {
bucketCount = 2
bucketCount = 1
}
duration, err := time.ParseDuration(fmt.Sprintf("%d%s", value*bucketCount, units))
if err != nil {
@ -491,7 +484,7 @@ func (engine *Engine) GetTargetMetricData(rule *alerting.Rule, isFilterNaN bool,
} else {
targetData = alerting.MetricData{
GroupValues: md.GroupValues,
Data: map[string][]alerting.MetricDataItem{},
Data: map[string][]alerting.TimeMetricData{},
}
expression, err := govaluate.NewEvaluableExpression(rule.Metrics.Formula)
if err != nil {
@ -515,14 +508,14 @@ func (engine *Engine) GetTargetMetricData(rule *alerting.Rule, isFilterNaN bool,
}
//drop nil value bucket
if v == nil {
if v == nil || len(v[i]) < 2 {
continue DataLoop
}
if _, ok := v[i].Value.(float64); !ok {
if _, ok := v[i][1].(float64); !ok {
continue DataLoop
}
parameters[k] = v[i].Value
timestamp = v[i].Timestamp
parameters[k] = v[i][1]
timestamp = v[i][0]
}
if len(parameters) == 0 {
continue
@ -535,13 +528,13 @@ func (engine *Engine) GetTargetMetricData(rule *alerting.Rule, isFilterNaN bool,
if r, ok := result.(float64); ok {
if math.IsNaN(r) || math.IsInf(r, 0) {
if !isFilterNaN {
targetData.Data["result"] = append(targetData.Data["result"], alerting.MetricDataItem{Timestamp: timestamp, Value: math.NaN()})
targetData.Data["result"] = append(targetData.Data["result"], []interface{}{timestamp, math.NaN()})
}
continue
}
}
targetData.Data["result"] = append(targetData.Data["result"], alerting.MetricDataItem{Timestamp: timestamp, Value: result})
targetData.Data["result"] = append(targetData.Data["result"], []interface{}{timestamp, result})
}
}
targetMetricData = append(targetMetricData, targetData)
@ -561,9 +554,6 @@ func (engine *Engine) CheckCondition(rule *alerting.Rule) (*alerting.ConditionRe
if err != nil {
return conditionResult, err
}
if rule.BucketConditions != nil {
return engine.CheckBucketCondition(rule, targetMetricData, queryResult)
}
for idx, targetData := range targetMetricData {
if idx == 0 {
sort.Slice(rule.Conditions.Items, func(i, j int) bool {
@ -589,16 +579,16 @@ func (engine *Engine) CheckCondition(rule *alerting.Rule) (*alerting.ConditionRe
triggerCount := 0
for i := 0; i < dataLength; i++ {
//clear nil value
if targetData.Data[dataKey][i].Value == nil {
if targetData.Data[dataKey][i][1] == nil {
continue
}
if r, ok := targetData.Data[dataKey][i].Value.(float64); ok {
if r, ok := targetData.Data[dataKey][i][1].(float64); ok {
if math.IsNaN(r) {
continue
}
}
evaluateResult, err := expression.Evaluate(map[string]interface{}{
"result": targetData.Data[dataKey][i].Value,
"result": targetData.Data[dataKey][i][1],
})
if err != nil {
return conditionResult, fmt.Errorf("evaluate rule [%s] error: %w", rule.ID, err)
@ -613,12 +603,12 @@ func (engine *Engine) CheckCondition(rule *alerting.Rule) (*alerting.ConditionRe
resultItem := alerting.ConditionResultItem{
GroupValues: targetData.GroupValues,
ConditionItem: &cond,
ResultValue: targetData.Data[dataKey][i].Value,
IssueTimestamp: targetData.Data[dataKey][i].Timestamp,
ResultValue: targetData.Data[dataKey][i][1],
IssueTimestamp: targetData.Data[dataKey][i][0],
RelationValues: map[string]interface{}{},
}
for _, metric := range rule.Metrics.Items {
resultItem.RelationValues[metric.Name] = queryResult.MetricData[idx].Data[metric.Name][i].Value
resultItem.RelationValues[metric.Name] = queryResult.MetricData[idx].Data[metric.Name][i][1]
}
resultItems = append(resultItems, resultItem)
break LoopCondition
@ -631,155 +621,6 @@ func (engine *Engine) CheckCondition(rule *alerting.Rule) (*alerting.ConditionRe
conditionResult.ResultItems = resultItems
return conditionResult, nil
}
type BucketDiffState struct {
ContentChangeState int
DocCount int
}
func (engine *Engine) CheckBucketCondition(rule *alerting.Rule, targetMetricData []alerting.MetricData, queryResult *alerting.QueryResult) (*alerting.ConditionResult, error) {
var resultItems []alerting.ConditionResultItem
conditionResult := &alerting.ConditionResult{
QueryResult: queryResult,
}
//transform targetMetricData
var (
times = map[int64]struct{}{}
buckets = map[string]map[int64]int{}
maxTime int64
minTime = time.Now().UnixMilli()
)
for _, targetData := range targetMetricData {
for _, v := range targetData.Data {
for _, item := range v {
if tv, ok := item.Timestamp.(float64); ok {
timestamp := int64(tv)
if timestamp < minTime {
minTime = timestamp
}
if timestamp > maxTime {
maxTime = timestamp
}
if _, ok = times[timestamp]; !ok {
times[timestamp] = struct{}{}
}
bucketKey := strings.Join(targetData.GroupValues, "*")
if _, ok = buckets[bucketKey]; !ok {
buckets[bucketKey] = map[int64]int{}
}
buckets[bucketKey][timestamp] = item.DocCount
} else {
log.Warnf("invalid timestamp type: %T", item.Timestamp)
}
}
}
}
var timesArr []int64
for t := range times {
timesArr = append(timesArr, t)
}
sort.Slice(timesArr, func(i, j int) bool {
return timesArr[i] < timesArr[j] // Ascending order
})
// Remove the first bucket if its timestamp equals minTime, and
// the last bucket if its timestamp equals maxTime
if len(timesArr) > 0 && timesArr[0] == minTime {
// Remove first bucket if timestamp matches minTime
timesArr = timesArr[1:]
}
if len(timesArr) > 0 && timesArr[len(timesArr)-1] == maxTime {
// Remove last bucket if timestamp matches maxTime
timesArr = timesArr[:len(timesArr)-1]
}
//check bucket diff
diffResult := map[string]map[int64]BucketDiffState{}
for grps, bk := range buckets {
hasPre := false
if _, ok := diffResult[grps]; !ok {
diffResult[grps] = map[int64]BucketDiffState{}
}
for i, t := range timesArr {
if v, ok := bk[t]; !ok {
if hasPre {
diffResult[grps][t] = BucketDiffState{
ContentChangeState: -1,
}
}
// reset hasPre to false
hasPre = false
} else {
if !hasPre {
if i > 0 {
diffResult[grps][t] = BucketDiffState{
ContentChangeState: 1,
}
}
} else {
diffResult[grps][t] = BucketDiffState{
ContentChangeState: 0,
DocCount: v - bk[timesArr[i-1]],
}
}
hasPre = true
}
}
}
sort.Slice(rule.BucketConditions.Items, func(i, j int) bool {
return alerting.PriorityWeights[rule.BucketConditions.Items[i].Priority] > alerting.PriorityWeights[rule.BucketConditions.Items[j].Priority]
})
for grps, states := range diffResult {
LoopCondition:
for _, cond := range rule.BucketConditions.Items {
conditionExpression, err := cond.GenerateConditionExpression()
if err != nil {
return conditionResult, err
}
expression, err := govaluate.NewEvaluableExpression(conditionExpression)
if err != nil {
return conditionResult, err
}
triggerCount := 0
for t, state := range states {
resultValue := state.DocCount
if cond.Type == alerting.BucketDiffTypeContent {
resultValue = state.ContentChangeState
}
evaluateResult, err := expression.Evaluate(map[string]interface{}{
"result": resultValue,
})
if err != nil {
return conditionResult, fmt.Errorf("evaluate rule [%s] error: %w", rule.ID, err)
}
if evaluateResult == true {
triggerCount += 1
} else {
triggerCount = 0
}
if triggerCount >= cond.MinimumPeriodMatch {
groupValues := strings.Split(grps, "*")
log.Debugf("triggered condition %v, groups: %v\n", cond, groupValues)
resultItem := alerting.ConditionResultItem{
GroupValues: groupValues,
ConditionItem: &cond,
ResultValue: resultValue,
IssueTimestamp: t,
RelationValues: map[string]interface{}{},
}
resultItems = append(resultItems, resultItem)
break LoopCondition
}
}
}
}
conditionResult.QueryResult.MetricData = targetMetricData
conditionResult.ResultItems = resultItems
return conditionResult, nil
}
func (engine *Engine) Do(rule *alerting.Rule) error {
var (
@ -914,9 +755,15 @@ func (engine *Engine) Do(rule *alerting.Rule) error {
})
alertItem.Priority = priority
var newAlertMessage *alerting.AlertMessage
title, message := rule.GetNotificationTitleAndMessage()
err = attachTitleMessageToCtx(title, message, paramsCtx)
if err != nil {
return err
}
alertItem.Message = paramsCtx[alerting2.ParamMessage].(string)
alertItem.Title = paramsCtx[alerting2.ParamTitle].(string)
if alertMessage == nil || alertMessage.Status == alerting.MessageStateRecovered {
newAlertMessage = &alerting.AlertMessage{
msg := &alerting.AlertMessage{
RuleID: rule.ID,
Created: alertItem.Created,
Updated: time.Now(),
@ -925,25 +772,13 @@ func (engine *Engine) Do(rule *alerting.Rule) error {
ResourceName: rule.Resource.Name,
Status: alerting.MessageStateAlerting,
Priority: priority,
Title: alertItem.Title,
Message: alertItem.Message,
Tags: rule.Tags,
Category: rule.Category,
}
paramsCtx[alerting2.ParamEventID] = newAlertMessage.ID
} else {
paramsCtx[alerting2.ParamEventID] = alertMessage.ID
}
title, message := rule.GetNotificationTitleAndMessage()
err = attachTitleMessageToCtx(title, message, paramsCtx)
if err != nil {
return err
}
alertItem.Message = paramsCtx[alerting2.ParamMessage].(string)
alertItem.Title = paramsCtx[alerting2.ParamTitle].(string)
if newAlertMessage != nil {
alertMessage = newAlertMessage
alertMessage.Title = alertItem.Title
alertMessage.Message = alertItem.Message
err = saveAlertMessage(newAlertMessage)
alertMessage = msg
err = saveAlertMessage(msg)
if err != nil {
return fmt.Errorf("save alert message error: %w", err)
}
@ -978,10 +813,10 @@ func (engine *Engine) Do(rule *alerting.Rule) error {
log.Debugf("check condition result of rule %s is %v", conditionResults, rule.ID)
// if alert message status equals ignored , then skip sending message to channel
if alertMessage.Status == alerting.MessageStateIgnored {
if alertMessage != nil && alertMessage.Status == alerting.MessageStateIgnored {
return nil
}
if paramsCtx != nil {
if alertMessage != nil && paramsCtx != nil {
paramsCtx[alerting2.ParamEventID] = alertMessage.ID
}
// if channel is not enabled return
@ -1300,16 +1135,12 @@ func collectMetricData(agg interface{}, groupValues string, metricData *[]alerti
if timeBks, ok := aggM["time_buckets"].(map[string]interface{}); ok {
if bks, ok := timeBks["buckets"].([]interface{}); ok {
md := alerting.MetricData{
Data: map[string][]alerting.MetricDataItem{},
Data: map[string][]alerting.TimeMetricData{},
GroupValues: strings.Split(groupValues, "*"),
}
for _, bk := range bks {
if bkM, ok := bk.(map[string]interface{}); ok {
var docCount int
if v, ok := bkM["doc_count"]; ok {
docCount = int(v.(float64))
}
for k, v := range bkM {
if k == "key" || k == "key_as_string" || k == "doc_count" {
continue
@ -1319,20 +1150,20 @@ func collectMetricData(agg interface{}, groupValues string, metricData *[]alerti
}
if vm, ok := v.(map[string]interface{}); ok {
if metricVal, ok := vm["value"]; ok {
md.Data[k] = append(md.Data[k], alerting.MetricDataItem{Timestamp: bkM["key"], Value: metricVal, DocCount: docCount})
md.Data[k] = append(md.Data[k], alerting.TimeMetricData{bkM["key"], metricVal})
} else {
//percentiles agg type
switch vm["values"].(type) {
case []interface{}:
for _, val := range vm["values"].([]interface{}) {
if valM, ok := val.(map[string]interface{}); ok {
md.Data[k] = append(md.Data[k], alerting.MetricDataItem{Timestamp: bkM["key"], Value: valM["value"], DocCount: docCount})
md.Data[k] = append(md.Data[k], alerting.TimeMetricData{bkM["key"], valM["value"]})
}
break
}
case map[string]interface{}:
for _, val := range vm["values"].(map[string]interface{}) {
md.Data[k] = append(md.Data[k], alerting.MetricDataItem{Timestamp: bkM["key"], Value: val, DocCount: docCount})
md.Data[k] = append(md.Data[k], alerting.TimeMetricData{bkM["key"], val})
break
}
}

View File

@ -20,7 +20,7 @@ const timeOuts = [
];
const TimeSetting = props => {
const { currentLocales, timeFields = [], showTimeField, showTimeInterval, timeIntervalDisabled = false, showTimeout, onTimeSettingChange, onCancel } = props;
const { currentLocales, timeFields = [], showTimeField, showTimeInterval, showTimeout, onTimeSettingChange, onCancel } = props;
const [isAuto, setIsAuto] = useState(!props.timeInterval)
const [timeField, setTimeField] = useState(props.timeField);
@ -78,7 +78,7 @@ const TimeSetting = props => {
<div className={styles.label}>
{currentLocales[`datepicker.time_setting.time_interval`]}
<div className={styles.auto}>
<Switch disabled={timeIntervalDisabled} size="small" checked={isAuto} onChange={(checked) => {
<Switch size="small" checked={isAuto} onChange={(checked) => {
setIsAuto(checked)
if (checked) {
timeIntervalCache.current = timeInterval;
@ -89,13 +89,6 @@ const TimeSetting = props => {
}}/> {currentLocales[`datepicker.time_setting.time_interval.auto`]}
</div>
</div>
{
timeIntervalDisabled && isAuto && (
<div className={styles.help}>
{currentLocales[`datepicker.time_setting.time_interval.help`]}
</div>
)
}
<div className={styles.form}>
{
!isAuto && timeIntervalObject && (

View File

@ -37,12 +37,6 @@
justify-content: space-between;
gap: 8px;
}
.help {
color: rgba(0, 0, 0, 0.45);
font-size: 12px;
word-break: break-all;
}
}
.apply {

View File

@ -87,7 +87,6 @@ const DatePicker = (props) => {
timeFields = [],
showTimeInterval = false,
timeInterval,
timeIntervalDisabled = false,
showTimeout = false,
timeout,
autoFitLoading = false,
@ -215,7 +214,7 @@ const DatePicker = (props) => {
isMinimum ? styles.minimum : ""
} ${className}`}
>
<Button.Group className={styles.RangeBox} style={{ width: onRefresh ? 'calc(100% - 64px)' : 'calc(100% - 32px)'}}>
<Button.Group className={styles.RangeBox}>
{!isMinimum && (
<Button
className={`${styles.iconBtn} common-ui-datepicker-backward`}

View File

@ -42,8 +42,8 @@
align-items: center;
margin-left: 4px !important;
.play {
min-width: 32px;
max-width: 32px;
min-width: 30px;
max-width: 30px;
padding: 0;
font-size: 14px;
color: #1890ff;

View File

@ -23,7 +23,6 @@
| timeFields | 时间字段列表 | string[] | [] | 1.0.0 |
| showTimeInterval | 是否显示时间间隔 | boolean | false | 1.0.0 |
| timeInterval | 时间间隔 | string | - | 1.0.0 |
| timeIntervalDisabled | 禁用时间间隔 | boolean | false | 1.0.0 |
| showTimeout | 是否显示超时时间 | boolean | false | 1.0.0 |
| timeout | 超时时间 | string | 10s | 1.0.0 |
| onTimeSettingChange | 时间配置变更的回调 | ({timeField: string, timeInterval: string, timeout: string}) => void | - | 1.0.0 |

View File

@ -21,7 +21,6 @@ export default {
"datepicker.time_setting.time_field": "Time field",
"datepicker.time_setting.time_interval": "Time interval",
"datepicker.time_setting.time_interval.auto": "Auto",
"datepicker.time_setting.time_interval.help": "Because of the long time range, time interval can only be calculated automatically.",
"datepicker.time_setting.time_interval.ms": "Millisecond",
"datepicker.time_setting.time_interval.s": "Second",
"datepicker.time_setting.time_interval.m": "Minute",

View File

@ -21,7 +21,6 @@ export default {
"datepicker.time_setting.time_field": "时间字段",
"datepicker.time_setting.time_interval": "时间间隔",
"datepicker.time_setting.time_interval.auto": "自动",
"datepicker.time_setting.time_interval.help": "由于时间跨度较长,仅支持自动计算时间间隔。",
"datepicker.time_setting.time_interval.ms": "毫秒",
"datepicker.time_setting.time_interval.s": "秒",
"datepicker.time_setting.time_interval.m": "分",

View File

@ -8,6 +8,7 @@ import { formatMessage } from "umi/locale";
import { getDocPathByLang, getWebsitePathByLang } from "@/utils/utils";
export default ({autoInit = false}) => {
const { loading, value } = useFetch(`/instance/_search`);
const [tokenLoading, setTokenLoading] = useState(false);
@ -17,6 +18,7 @@ export default ({autoInit = false}) => {
const fetchTokenInfo = async () => {
setTokenInfo()
// if (seletedGateways.length === 0) return;
setTokenLoading(true)
const res = await request('/instance/_generate_install_script', {
method: "POST",
@ -33,10 +35,32 @@ export default ({autoInit = false}) => {
}
}, [])
const gateways = value?.hits?.hits || []
return (
<Spin spinning={tokenLoading}>
<Spin spinning={loading || tokenLoading}>
<div className={styles.installAgent}>
{/* <Form className={styles.gateway} layout="vertical">
<Form.Item label="选择接入网关" required>
<Select
mode="multiple"
style={{ width: '100%' }}
onChange={(value) => setSeletedGateways(value)}
onBlur={() => fetchTokenInfo()}
>
{
gateways.map((item) => (
<Select.Option key={item._source.endpoint}>
<span>
<span style={{marginRight: 4}}>{item._source.name}</span>
<span>[{item._source.endpoint}]</span>
</span>
</Select.Option>
))
}
</Select>
</Form.Item>
</Form> */}
{!autoInit && <Button className={styles.gateway} type="primary" onClick={() => fetchTokenInfo()}>
{formatMessage({
id:"agent.install.label.get_cmd"

View File

@ -15,7 +15,7 @@ import { formatMessage } from "umi/locale";
import DatePicker from "@/common/src/DatePicker";
import { getLocale } from "umi/locale";
import { getTimezone } from "@/utils/utils";
import { getAllTimeSettingsCache, initState, TIME_SETTINGS_KEY } from "../../Monitor";
import { getAllTimeSettingsCache, TIME_SETTINGS_KEY } from "../../Monitor";
const { TabPane } = Tabs;
@ -33,27 +33,37 @@ export default (props) => {
const allTimeSettingsCache = getAllTimeSettingsCache() || {}
const [spinning, setSpinning] = useState(false);
const [state, setState] = useState(initState({
const [state, setState] = useState({
timeRange: {
min: "now-15m",
max: "now",
timeFormatter: formatter.dates(1),
},
timeInterval: allTimeSettingsCache.timeInterval,
timeout: allTimeSettingsCache.timeout || '10s',
}));
});
const [refresh, setRefresh] = useState({ isRefreshPaused: allTimeSettingsCache.isRefreshPaused || false, refreshInterval: allTimeSettingsCache.refreshInterval || 30000 });
const [timeZone, setTimeZone] = useState(() => allTimeSettingsCache.timeZone || getTimezone());
const handleTimeChange = ({ start, end, timeInterval, timeout }) => {
setState(initState({
const bounds = calculateBounds({
from: start,
to: end,
});
const day = moment
.duration(bounds.max.valueOf() - bounds.min.valueOf())
.asDays();
const intDay = parseInt(day) + 1;
setState({
timeRange: {
min: start,
max: end,
timeFormatter: formatter.dates(intDay),
},
timeInterval: timeInterval || state.timeInterval,
timeout: timeout || state.timeout
}));
});
setSpinning(true);
};
@ -105,7 +115,6 @@ export default (props) => {
showTimeout={true}
timeout={state.timeout}
timeInterval={state.timeInterval}
timeIntervalDisabled={state.timeIntervalDisabled}
onTimeSettingChange={(timeSetting) => {
onTimeSettingsChange({
timeInterval: timeSetting.timeInterval,

View File

@ -54,36 +54,6 @@ export const getAllTimeSettingsCache = () => {
}
}
const getDuration = (from, to) => {
if (!from || !to) return;
const bounds = calculateBounds({
from,
to,
});
return bounds.max.valueOf() - bounds.min.valueOf()
}
export const initState = (state = {}) => {
const { timeRange, timeInterval, timeout } = state || {}
const from = timeRange?.min || "now-15m"
const to = timeRange?.max || "now"
const duration = getDuration(from, to);
const gtOneHour = moment.duration(duration).asHours() > 1
const day = moment.duration(duration).asDays();
const intDay = parseInt(day) + 1;
return {
...state,
timeRange: {
min: from,
max: to,
timeFormatter: formatter.dates(intDay),
},
timeInterval: gtOneHour ? undefined : timeInterval,
timeIntervalDisabled: gtOneHour,
timeout: timeout || '10s',
}
}
const Monitor = (props) => {
const {
selectedCluster,
@ -101,16 +71,19 @@ const Monitor = (props) => {
const [spinning, setSpinning] = useState(false);
const [state, setState] = useState(formatState(initState({
timeRange: {
min: param?.timeRange?.min || "now-15m",
max: param?.timeRange?.max || "now",
},
timeInterval: formatTimeInterval(param?.timeInterval) || allTimeSettingsCache.timeInterval,
timeout: formatTimeout(param?.timeout) || allTimeSettingsCache.timeout || '10s',
param: param,
refresh: true,
})));
const [state, setState] = useState(
formatState({
timeRange: {
min: param?.timeRange?.min || "now-15m",
max: param?.timeRange?.max || "now",
timeFormatter: formatter.dates(1),
},
timeInterval: formatTimeInterval(param?.timeInterval) || allTimeSettingsCache.timeInterval,
timeout: formatTimeout(param?.timeout) || allTimeSettingsCache.timeout || '10s',
param: param,
refresh: true,
})
);
const [refresh, setRefresh] = useState({ isRefreshPaused: typeof allTimeSettingsCache.isRefreshPaused !== 'undefined' ? allTimeSettingsCache.isRefreshPaused : true, refreshInterval: allTimeSettingsCache.refreshInterval || 30000 });
const [timeZone, setTimeZone] = useState(() => allTimeSettingsCache.timeZone || getTimezone());
@ -119,24 +92,31 @@ const Monitor = (props) => {
setParam({ ...param, timeRange: state.timeRange, timeInterval: state.timeInterval, timeout: state.timeout });
}, [state.timeRange, state.timeInterval, state.timeout]);
const handleTimeChange = ({ start, end, timeInterval, timeout, refresh }) => {
setState(initState({
const handleTimeChange = useCallback(({ start, end, timeInterval, timeout, refresh }) => {
const bounds = calculateBounds({
from: start,
to: end,
});
const day = moment
.duration(bounds.max.valueOf() - bounds.min.valueOf())
.asDays();
const intDay = parseInt(day) + 1;
setState({
...state,
param,
timeRange: {
min: start,
max: end,
timeFormatter: formatter.dates(intDay),
},
timeInterval: timeInterval || state.timeInterval,
timeout: timeout || state.timeout,
refresh
}));
}
});
}, [state])
const onInfoChange = (info) => {
setState({
...state,
param,
info,
});
};
@ -153,12 +133,9 @@ const Monitor = (props) => {
const breadcrumbList = getBreadcrumbList(state);
const isAgent = useMemo(() => {
const { metric_collection_mode, monitor_configs = {} } = selectedCluster || {}
if (typeof metric_collection_mode === 'undefined') {
return monitor_configs?.node_stats?.enabled === false && monitor_configs?.index_stats?.enabled === false
}
return metric_collection_mode === 'agent'
}, [JSON.stringify(selectedCluster)])
const { monitor_configs = {} } = selectedCluster || {}
return monitor_configs?.node_stats?.enabled === false && monitor_configs?.index_stats?.enabled === false
}, [JSON.stringify(selectedCluster?.monitor_configs)])
return (
<div>
@ -170,48 +147,44 @@ const Monitor = (props) => {
<>
<div style={{ marginBottom: 5 }}>
<div style={{ display: 'flex', justifyContent: 'space-between' }}>
<div style={{ maxWidth: 600 }}>
<DatePicker
locale={getLocale()}
start={state.timeRange.min}
end={state.timeRange.max}
onRangeChange={({ start, end }) => {
handleTimeChange({ start, end })
}}
{...refresh}
onRefreshChange={(newRefresh) => {
onTimeSettingsChange(newRefresh)
setRefresh(newRefresh)
}}
onRefresh={(value) => handleTimeChange({ ...(value || {}), refresh: new Date().valueOf()})}
showTimeSetting={true}
showTimeInterval={true}
timeInterval={state.timeInterval}
timeIntervalDisabled={state.timeIntervalDisabled}
showTimeout={true}
timeout={state.timeout}
onTimeSettingChange={(timeSetting) => {
onTimeSettingsChange({
timeInterval: timeSetting.timeInterval,
timeout: timeSetting.timeout
})
setState({
...state,
param,
timeInterval: timeSetting.timeInterval,
timeout: timeSetting.timeout
});
}}
timeZone={timeZone}
onTimeZoneChange={(timeZone) => {
onTimeSettingsChange({
timeZone,
})
setTimeZone(timeZone)
}}
recentlyUsedRangesKey={'monitor'}
/>
</div>
<DatePicker
locale={getLocale()}
start={state.timeRange.min}
end={state.timeRange.max}
onRangeChange={({ start, end }) => {
handleTimeChange({ start, end })
}}
{...refresh}
onRefreshChange={(newRefresh) => {
onTimeSettingsChange(newRefresh)
setRefresh(newRefresh)
}}
onRefresh={handleTimeChange}
showTimeSetting={true}
showTimeInterval={true}
timeInterval={state.timeInterval}
showTimeout={true}
timeout={state.timeout}
onTimeSettingChange={(timeSetting) => {
onTimeSettingsChange({
timeInterval: timeSetting.timeInterval,
timeout: timeSetting.timeout
})
setState({
...state,
timeInterval: timeSetting.timeInterval,
timeout: timeSetting.timeout
});
}}
timeZone={timeZone}
onTimeZoneChange={(timeZone) => {
onTimeSettingsChange({
timeZone,
})
setTimeZone(timeZone)
}}
recentlyUsedRangesKey={'monitor'}
/>
<CollectStatus fetchUrl={`${ESPrefix}/${selectedCluster?.id}/_collection_stats`}/>
</div>
</div>
@ -226,7 +199,7 @@ const Monitor = (props) => {
animated={false}
>
{panes.map((pane) => (
<TabPane tab={formatMessage({id: `cluster.monitor.tabs.${pane.key}`})} key={pane.key}>
<TabPane tab={pane.title} key={pane.key}>
<Spin spinning={spinning && !!state.refresh}>
<StatisticBar
setSpinning={setSpinning}
@ -254,7 +227,6 @@ const Monitor = (props) => {
})
setState({
...state,
param,
timeInterval,
});
}}

View File

@ -72,7 +72,7 @@ function createTimeRangeFilter(
{
...(bounds.min && { gte: bounds.min.toISOString() }),
...(bounds.max && { lte: bounds.max.toISOString() }),
// format: 'strict_date_optional_time',
format: 'strict_date_optional_time',
},
indexPattern
);

View File

@ -33,7 +33,7 @@ export const createFilterDateHistogram = (
{
gte: start.toISOString(),
lt: start.add(interval).toISOString(),
// format: 'strict_date_optional_time',
format: 'strict_date_optional_time',
},
agg.getIndexPattern()
);

View File

@ -26,7 +26,7 @@ export const createFilterDateRange = (agg: IBucketAggConfig, { from, to }: DateR
const filter: RangeFilterParams = {};
if (from) filter.gte = moment(from).toISOString();
if (to) filter.lt = moment(to).toISOString();
// if (to && from) filter.format = 'strict_date_optional_time';
if (to && from) filter.format = 'strict_date_optional_time';
return buildRangeFilter(agg.params.field, filter, agg.getIndexPattern());
};

View File

@ -56,9 +56,9 @@ export async function createFiltersFromRangeSelectAction(event: RangeSelectConte
lt: isDate ? moment(max).toISOString() : max,
};
// if (isDate) {
// range.format = 'strict_date_optional_time';
// }
if (isDate) {
range.format = 'strict_date_optional_time';
}
return esFilters.mapAndFlattenFilters([esFilters.buildRangeFilter(field, range, indexPattern)]);
}

View File

@ -121,9 +121,6 @@ function FilterBarUI(props: Props) {
onCancel={() => setIsAddFilterPopoverOpen(false)}
key={JSON.stringify(newFilter)}
services={props.services}
dateRangeFrom={props.dateRangeFrom}
dateRangeTo={props.dateRangeTo}
timeField={props.timeField}
/>
</div>
</EuiFlexItem>

View File

@ -313,9 +313,6 @@ class FilterEditorUI extends Component<Props, State> {
onChange={this.onParamsChange}
data-test-subj="phraseValueInput"
services={this.props.services}
dateRangeFrom={this.props.dateRangeFrom}
dateRangeTo={this.props.dateRangeTo}
timeField={this.props.timeField}
/>
);
case 'phrases':
@ -326,9 +323,6 @@ class FilterEditorUI extends Component<Props, State> {
values={this.state.params}
onChange={this.onParamsChange}
services={this.props.services}
dateRangeFrom={this.props.dateRangeFrom}
dateRangeTo={this.props.dateRangeTo}
timeField={this.props.timeField}
/>
);
case 'range':

View File

@ -82,30 +82,17 @@ export class PhraseSuggestorUI<
protected updateSuggestions = debounce(async (query: string = "") => {
if (this.abortController) this.abortController.abort();
this.abortController = new AbortController();
const { indexPattern, field, dateRangeFrom, dateRangeTo, timeField } = this.props as PhraseSuggestorProps;
const { indexPattern, field } = this.props as PhraseSuggestorProps;
if (!field || !this.isSuggestingValues()) {
return;
}
this.setState({ isLoading: true });
const boolFilter = []
if (dateRangeFrom && dateRangeTo && timeField) {
boolFilter.push({
"range": {
[timeField]: {
"gte": dateRangeFrom,
"lte": dateRangeTo
}
}
})
}
const suggestions = await this.props.services.data.autocomplete.getValueSuggestions(
{
indexPattern,
field,
query,
boolFilter,
signal: this.abortController.signal,
}
);

View File

@ -50,7 +50,8 @@ class PhraseValueInputUI extends PhraseSuggestorUI<Props> {
}
private renderWithSuggestions() {
const suggestions = Array.isArray(this.state.suggestions) ? this.state.suggestions : []
let { suggestions } = this.state;
suggestions = suggestions || [];
const { value, intl, onChange } = this.props;
// there are cases when the value is a number, this would cause an exception
const valueAsStr = String(value);

View File

@ -59,7 +59,7 @@
@include euiBreakpoint("m", "l", "xl") {
.kbnQueryBar__datePickerWrapper {
// sass-lint:disable-block no-important
max-width: 400px;
max-width: 340px;
flex-grow: 0 !important;
flex-basis: auto !important;
margin-right: -$euiSizeXS;

View File

@ -264,7 +264,7 @@ export default function QueryBarTopRow(props: QueryBarTopRowProps) {
return (
<NoDataPopover storage={storage} showNoDataPopover={props.indicateNoData}>
<EuiFlexGroup responsive={false} gutterSize="s">
{/* {renderHistogram()} */}
{renderHistogram()}
{renderDatePicker()}
<EuiFlexItem grow={false}>{button}</EuiFlexItem>
</EuiFlexGroup>

View File

@ -484,9 +484,6 @@ class SearchBarUI extends Component<SearchBarProps, State> {
filters={this.props.filters!}
onFiltersUpdated={this.props.onFiltersUpdated}
indexPatterns={this.props.indexPatterns!}
dateRangeFrom={this.state.dateRangeFrom}
dateRangeTo={this.state.dateRangeTo}
timeField={this.props.timeSetting?.timeField}
services={this.props.services}
/>
</div>

View File

@ -88,7 +88,7 @@ export class DiscoverHistogram extends Component {
render() {
const timeZone = getTimezone();
const { chartData, height = 100 } = this.props;
const { chartData } = this.props;
const { chartsTheme, chartsBaseTheme } = this.state;
@ -149,7 +149,7 @@ export class DiscoverHistogram extends Component {
//console.log(data)
return (
<Chart size={{ height }}>
<Chart size={{ height: 40 }}>
<Settings
xDomain={xDomain}
onBrushEnd={this.onBrushEnd}

View File

@ -121,7 +121,6 @@ export default {
"form.button.restart": "Restart",
"form.button.verify": "Verify",
"form.button.clean": "Clean",
"form.button.view_logs": "View Logs",
"form.button.clean.confim.desc": "Are you sure to clean data that is {status}?",
"form.button.clean.unavailable.nodes": "Clean unavailable nodes",
"form.button.clean.unavailable.nodes.desc": "Are you sure to clean nodes that are unavailable within seven days?",

View File

@ -6,12 +6,7 @@ export default {
"agent.instance.associate.labels.select_cluster": "Select Cluster",
"agent.instance.associate.tips.associate":
"Please select cluster(s) to enroll !",
"agent.instance.associate.set_credential": "Set credential for agent",
"agent.instance.associate.set_credential.tips":
"This permission will be used for metrics and log collection. It is recommended to use a user with a reasonable permission range.",
"agent.instance.associate.tips.connected": "Connection succeeded!",
"agent.instance.associate.tips.connected.check": "please set a credential for agent",
"agent.instance.associate.auth.error": "The following clusters need to set credentials for the agent:",
"agent.instance.associate.tips.metric":
"After enroll, the agent will collect metrics for the enrolled cluster",
"agent.instance.associate.tips.unregister":
@ -40,10 +35,4 @@ export default {
"agent.install.setup.copy.success": "Copied to clipboard successfully!",
"agent.instance.auto_associate.title": "Auto Enroll",
"agent.instance.install.title": "Install Agent",
"agent.label.agent_credential": "Agent Credential",
"agent.credential.tip": "No credential required",
"agent.instance.clear.title": "Clear Offline Instances",
"agent.instance.clear.modal.title": "Are you sure you want to clear offline instances?",
"agent.instance.clear.modal.desc": "This operation will delete offline instances that have not reported metrics for 7 days."
};

View File

@ -206,11 +206,7 @@ export default {
"alert.rule.table.columnns.objects": "Objects",
"alert.rule.table.columnns.schedule": "Schedule",
"alert.rule.table.columnns.expression": "Expression",
"alert.rule.table.columnns.condition.type": "Condition Type",
"alert.rule.table.columnns.condition": "Condition",
"alert.rule.table.columnns.status": "Status",
"alert.rule.table.columnns.status.failed": "Connect failed",
"alert.rule.table.columnns.status.succeeded": "Connect succeeded",
"alert.rule.table.columnns.enabled": "Enabled",
"alert.rule.table.columnns.updated": "Updated time",
"alert.rule.table.columnns.category": "Category",
@ -225,7 +221,6 @@ export default {
//Configure alert objects 配置告警对象
"alert.rule.form.title.configure_alert_object": "Configure alert objects",
"alert.rule.form.label.alert_metric": "Metrics",
"alert.rule.form.label.bucket_label_template": "Bucket Label Template",
"alert.rule.form.label.alert_metric.groups": "Groups",
"alert.rule.form.label.alert_metric.button.add_group": "Add group",
"alert.rule.form.label.alert_metric.button.add_metric": "Add metrics",
@ -240,13 +235,7 @@ export default {
"alert.rule.form.label.alert_condition": "Conditions",
"alert.rule.form.label.event_title": "Event title",
"alert.rule.form.label.event_message": "Event message",
"alert.rule.form.label.metrics_value": "Metrics value",
"alert.rule.form.label.buckets_diff": "Buckets diff",
"alert.rule.form.label.above_metric": "Above metrics",
"alert.rule.form.label.size": "Doc diff",
"alert.rule.form.label.content": "Content diff",
"alert.rule.form.label.in": "In",
"alert.rule.form.label.content.changed": "Changed",
"alert.rule.form.label.lasts_periods": "Lasts {num} periods",
"alert.rule.form.button.add_condition": "Add condition",
"alert.rule.form.label.trigger": "Trigger",
@ -335,9 +324,7 @@ export default {
"alert.message.detail.action_result": "Execution result",
"alert.message.detail.action_result_error": "Exection error",
"alert.message.detail.alert_info": "Alert Detail",
"alert.message.detail.condition.type": "Condition Type",
"alert.message.detail.condition": "Condition",
"alert.message.detail.bucket_diff_type": "Bucket Diff Type",
"alert.message.detail.recover_time": "Recovered Time",
"alert.message.detail.title.event_detail": "Event Detail",
"alert.message.detail.title.summary": "Summary",

View File

@ -35,7 +35,6 @@ export default {
"cluster.manage.table.column.location": "Location",
"cluster.manage.monitored.on": "ON",
"cluster.manage.monitored.off": "OFF",
"cluster.manage.metric_collection_mode": "Collect Mode",
"cluster.manage.monitor_configs.cluster_health": "Cluster health",
"cluster.manage.monitor_configs.cluster_stats": "Cluster stats",
"cluster.manage.monitor_configs.node_stats": "Node stats",
@ -124,23 +123,6 @@ export default {
"cluster.monitor.topn.color": "Color Metric",
"cluster.monitor.topn.theme": "Theme",
"cluster.monitor.logs.timestamp": "Timestamp",
"cluster.monitor.logs.type": "Type",
"cluster.monitor.logs.level": "Level",
"cluster.monitor.logs.node": "Node",
"cluster.monitor.logs.message": "Message",
"cluster.monitor.logs.search.placeholder": "Search message",
"cluster.monitor.logs.empty.agent": "No data, please change the time range or check if the Agent is working properly.",
"cluster.monitor.logs.empty.agentless": "No data, please install the Agent and change the cluster collection mode to Agent.",
"cluster.monitor.tabs.overview": "Overview",
"cluster.monitor.tabs.advanced": "Advanced",
"cluster.monitor.tabs.topn": "TopN",
"cluster.monitor.tabs.logs": "Logs",
"cluster.monitor.tabs.nodes": "Nodes",
"cluster.monitor.tabs.indices": "Indices",
"cluster.monitor.tabs.shards": "Shards",
"cluster.metrics.axis.index_throughput.title": "Indexing Rate",
"cluster.metrics.axis.search_throughput.title": "Search Rate",
"cluster.metrics.axis.index_latency.title": "Indexing Latency",
@ -388,6 +370,4 @@ export default {
"cluster.collect.last_active_at": "Last Active At",
};

View File

@ -1,5 +1,5 @@
export default {
"error.split": ", ",
"error.unknown": "unknown error, please try again later or contact the support team!",
"error.request_timeout_error": "request timeout, please try again later!",
"error.request_timeout_error": "request timeout, please try again later or contact the support team!",
};

View File

@ -126,7 +126,6 @@ export default {
"form.button.restart": "重启",
"form.button.verify": "校验",
"form.button.clean": "清除",
"form.button.view_logs": "View Logs",
"form.button.clean.confim.desc": "确定删除状态为 {status} 的数据吗?",
"form.button.clean.unavailable.nodes": "清除不可用节点",
"form.button.clean.unavailable.nodes.desc": "确定清除7天内不可用的节点吗",

View File

@ -5,11 +5,7 @@ export default {
"agent.instance.associate.labels.cluster_version": "版本",
"agent.instance.associate.labels.select_cluster": "关联到集群",
"agent.instance.associate.tips.associate": "请选择要关联的集群!",
"agent.instance.associate.set_credential": "为代理设置凭据",
"agent.instance.associate.set_credential.tips": "此权限将用于度量和日志收集。建议使用具有合理权限范围的用户。",
"agent.instance.associate.tips.connected": "连接成功!",
"agent.instance.associate.tips.connected.check": "请设置凭据",
"agent.instance.associate.auth.error": "以下集群需要为 Agent 设置凭据:",
"agent.instance.associate.tips.metric":
"关联后 Agent 会对关联的集群进行指标采集操作",
"agent.instance.associate.tips.unregister":
@ -37,10 +33,4 @@ export default {
"agent.install.setup.copy.success": "已成功复制到剪贴板!",
"agent.instance.auto_associate.title": "自动关联集群",
"agent.instance.install.title": "安装 Agent",
"agent.label.agent_credential": "代理凭据",
"agent.credential.tip": "不需要凭据",
"agent.instance.clear.title": "清理离线实例",
"agent.instance.clear.modal.title": "您确定要清理离线实例?",
"agent.instance.clear.modal.desc": "该操作将会删除离线并且 7 天没有上报指标的实例"
};

View File

@ -194,11 +194,7 @@ export default {
"alert.rule.table.columnns.objects": "告警对象",
"alert.rule.table.columnns.schedule": "计划周期",
"alert.rule.table.columnns.expression": "告警规则",
"alert.rule.table.columnns.condition.type": "触发条件类型",
"alert.rule.table.columnns.condition": "触发条件",
"alert.rule.table.columnns.status": "运行状态",
"alert.rule.table.columnns.status.failed": "连接失败",
"alert.rule.table.columnns.status.succeeded": "连接成功",
"alert.rule.table.columnns.enabled": "告警启停",
"alert.rule.table.columnns.updated": "更新时间",
"alert.rule.table.columnns.category": "分类",
@ -213,7 +209,6 @@ export default {
//Configure alert objects 配置告警对象
"alert.rule.form.title.configure_alert_object": "配置告警对象",
"alert.rule.form.label.alert_metric": "告警指标",
"alert.rule.form.label.bucket_label_template": "分桶标签模板",
"alert.rule.form.label.alert_metric.groups": "指标分组",
"alert.rule.form.label.alert_metric.button.add_group": "添加分组",
"alert.rule.form.label.alert_metric.button.add_metric": "添加指标",
@ -228,13 +223,7 @@ export default {
"alert.rule.form.label.alert_condition": "告警条件",
"alert.rule.form.label.event_title": "事件标题",
"alert.rule.form.label.event_message": "事件内容",
"alert.rule.form.label.metrics_value": "指标数值",
"alert.rule.form.label.buckets_diff": "分桶对比",
"alert.rule.form.label.above_metric": "以上指标",
"alert.rule.form.label.size": "文档差异数",
"alert.rule.form.label.content": "内容差异数",
"alert.rule.form.label.in": "在",
"alert.rule.form.label.content.changed": "变更",
"alert.rule.form.label.lasts_periods": "持续{num}个周期",
"alert.rule.form.button.add_condition": "添加条件",
"alert.rule.form.label.trigger": "触发",
@ -318,9 +307,7 @@ export default {
"alert.message.detail.action_result": "执行结果",
"alert.message.detail.action_result_error": "规则执行错误",
"alert.message.detail.alert_info": "告警详情",
"alert.message.detail.condition.type": "触发条件类型",
"alert.message.detail.condition": "触发条件",
"alert.message.detail.bucket_diff_type": "分桶对比类型",
"alert.message.detail.recover_time": "恢复时间",
"alert.message.detail.title.event_detail": "事件详情",
"alert.message.detail.title.summary": "概览",

View File

@ -35,7 +35,6 @@ export default {
"cluster.manage.table.column.location": "位置",
"cluster.manage.monitored.on": "启用",
"cluster.manage.monitored.off": "关闭",
"cluster.manage.metric_collection_mode": "采集模式",
"cluster.manage.monitor_configs.cluster_health": "集群健康状态指标",
"cluster.manage.monitor_configs.cluster_stats": "集群指标",
"cluster.manage.monitor_configs.node_stats": "节点指标",
@ -115,23 +114,6 @@ export default {
"cluster.monitor.topn.color": "颜色指标",
"cluster.monitor.topn.theme": "主题",
"cluster.monitor.logs.timestamp": "时间戳",
"cluster.monitor.logs.type": "类型",
"cluster.monitor.logs.level": "等级",
"cluster.monitor.logs.node": "节点",
"cluster.monitor.logs.message": "日志",
"cluster.monitor.logs.search.placeholder": "搜索日志",
"cluster.monitor.logs.empty.agent": "没有数据,请更改时间范围或检查 Agent 是否正常工作。",
"cluster.monitor.logs.empty.agentless": "没有数据,请安装 Agent 并更改集群采集模式为 Agent 。",
"cluster.monitor.tabs.overview": "概览",
"cluster.monitor.tabs.advanced": "高级",
"cluster.monitor.tabs.topn": "TopN",
"cluster.monitor.tabs.logs": "日志",
"cluster.monitor.tabs.nodes": "节点",
"cluster.monitor.tabs.indices": "索引",
"cluster.monitor.tabs.shards": "分片",
"cluster.metrics.axis.index_throughput.title": "索引吞吐",
"cluster.metrics.axis.search_throughput.title": "查询吞吐",
"cluster.metrics.axis.index_latency.title": "索引延迟",

View File

@ -1,5 +1,5 @@
export default {
"error.split": "",
"error.unknown": "未知错误,请稍后重试或者联系支持团队!",
"error.request_timeout_error": "请求超时,请稍后重试",
"error.request_timeout_error": "请求超时,请稍后重试或者联系支持团队",
}

View File

@ -355,10 +355,7 @@ export default {
let idx = state.clusterList.findIndex((item) => item.id === payload.id);
idx > -1 && (state.clusterList[idx].name = payload.name);
if (state.selectedCluster?.id === payload.id) {
state.selectedCluster = {
...(state.selectedCluster || {}),
...(payload || {})
}
state.selectedCluster.monitor_configs = payload.monitor_configs
}
state.clusterStatus[payload.id].config.monitored = payload.monitored;
return state;

View File

@ -1,117 +0,0 @@
import { Alert, Button, Form, message } from "antd";
import { useState } from "react";
import { formatMessage } from "umi/locale";
import request from "@/utils/request";
import AgentCredentialForm, { MANUAL_VALUE } from "./AgentCredentialForm";
import { ESPrefix } from "@/services/common";
const formItemLayout = {
labelCol: {
xs: { span: 24 },
sm: { span: 5 },
},
wrapperCol: {
xs: { span: 24 },
sm: { span: 18 },
},
};
export default Form.create()((props) => {
const { form, record, loading, tryConnect, onAgentCredentialSave } = props;
const [isManual, setIsManual] = useState(false);
const [saveLoading, setSaveLoading] = useState(false);
const needAuth = !!(record.credential_id || record.basic_auth?.username);
const onConfirm = async () => {
form.validateFields(async (errors, values) => {
if (errors) return;
setSaveLoading(true);
const { credential_id, basic_auth, metric_collection_mode } = record;
const res = await request(`${ESPrefix}/${record.id}`, {
method: "PUT",
body: {
credential_id,
basic_auth,
metric_collection_mode,
agent_credential_id:
values.agent_credential_id !== MANUAL_VALUE
? values.agent_credential_id
: undefined,
agent_basic_auth: {
username: values.agent_username,
password: values.agent_password,
},
},
});
if (res?.result === "updated") {
message.success(
formatMessage({
id: "app.message.update.success",
})
);
const res = await request(`/elasticsearch/${record.id}`);
if (res?.found) {
onAgentCredentialSave(res._source);
if (res._source?.agent_credential_id) {
setIsManual(false);
}
form.setFieldsValue({
agent_credential_id: res._source?.agent_credential_id
? res._source?.agent_credential_id
: res._source?.agent_basic_auth?.username
? MANUAL_VALUE
: undefined,
agent_username: res._source.agent_basic_auth?.username,
agent_password: res._source.agent_basic_auth?.password,
});
}
} else {
message.error(
formatMessage({
id: "app.message.update.failed",
})
);
}
setSaveLoading(false);
});
};
if (!needAuth) {
return (
<Alert
message={formatMessage({ id: "agent.credential.tip" })}
type="success"
/>
);
}
return (
<Form {...formItemLayout} colon={false}>
<AgentCredentialForm
btnLoading={loading}
needAuth={needAuth}
form={form}
initialValue={{
...record,
username: record.agent_basic_auth?.username,
password: record.agent_basic_auth?.password,
}}
isManual={isManual}
setIsManual={setIsManual}
isEdit={true}
tryConnect={tryConnect}
credentialRequired={false}
/>
<Form.Item label=" " colon={false}>
<div style={{ textAlign: "right" }}>
<Button loading={loading} type="primary" onClick={() => onConfirm()}>
{formatMessage({ id: "cluster.regist.step.confirm.title" })}
</Button>
</div>
</Form.Item>
</Form>
);
});

View File

@ -1,143 +0,0 @@
import React, { useEffect, useMemo, useState } from "react";
import { Button, Divider, Form, Input, Select, Row, Col } from "antd";
import { formatMessage } from "umi/locale";
import useFetch from "@/lib/hooks/use_fetch";
import { formatESSearchResult } from "@/lib/elasticsearch/util";
export const MANUAL_VALUE = "manual";
export default (props) => {
const {
btnLoading = false,
needAuth,
form: { getFieldDecorator },
initialValue,
isEdit,
tryConnect,
credentialRequired = false,
isManual,
setIsManual,
} = props;
const { loading, error, value, run } = useFetch(
"/credential/_search",
{
queryParams: {
from: 0,
size: 1000,
},
},
[]
);
const onCredentialChange = (value) => {
if (value === "manual") {
setIsManual(true);
} else {
setIsManual(false);
}
};
const { data, total } = useMemo(() => {
return formatESSearchResult(value);
}, [value]);
if (!needAuth) {
return null;
}
return (
<>
<Form.Item
label={formatMessage({
id: "cluster.regist.step.connect.label.agent_credential",
})}
>
{getFieldDecorator("agent_credential_id", {
initialValue: initialValue?.agent_credential_id
? initialValue?.agent_credential_id
: initialValue?.username
? MANUAL_VALUE
: undefined,
rules: [
{
required: credentialRequired,
message: formatMessage({
id: "cluster.regist.form.verify.required.agent_credential",
}),
},
],
})(
<Select loading={loading} onChange={onCredentialChange} allowClear>
<Select.Option value={MANUAL_VALUE}>
{formatMessage({
id: "cluster.regist.step.connect.credential.manual",
})}
</Select.Option>
{data.map((item) => (
<Select.Option value={item.id}>{item.name}</Select.Option>
))}
</Select>
)}
</Form.Item>
{isManual && (
<>
<Form.Item
label={formatMessage({
id: "cluster.regist.step.connect.label.username",
})}
>
{getFieldDecorator("agent_username", {
initialValue: initialValue?.username || "",
rules: [
{
required: credentialRequired,
message: formatMessage({
id: "cluster.regist.form.verify.required.auth_username",
}),
},
],
})(<Input autoComplete="off" placeholder={formatMessage({id: "agent.form.placeholder.auth.username"})} />)}
</Form.Item>
<Form.Item
label={formatMessage({
id: "cluster.regist.step.connect.label.password",
})}
hasFeedback
>
{getFieldDecorator("agent_password", {
initialValue: initialValue?.password || "",
rules: [
{
required: credentialRequired,
message: formatMessage({
id: "cluster.regist.form.verify.required.auth_password",
}),
},
],
})(
<Input.Password
autoComplete="off"
placeholder={formatMessage({
id: "cluster.regist.form.verify.required.auth_password",
})}
/>
)}
</Form.Item>
{isEdit && (
<>
<Form.Item label={" "}>
<div style={{ lineHeight: "20px" }}>
{formatMessage({
id: "cluster.regist.form.credential.manual.desc",
})}
</div>
</Form.Item>
</>
)}
</>
)}
</>
);
};

View File

@ -1,36 +1,22 @@
import { useGlobal } from "@/layouts/GlobalContext";
import request from "@/utils/request";
import { Form, Input, Switch, Icon, Button, Select, Alert } from "antd";
import { useEffect, useMemo, useRef, useState } from "react";
import { Form, Input, Switch, Icon, Button, Select } from "antd";
import { useMemo, useRef, useState } from "react";
import { Link, router } from "umi";
import { formatMessage } from "umi/locale";
import ClusterSelect from "@/components/ClusterSelect";
import SetAgentCredential from "./SetAgentCredential";
export default ({ onEnroll, loading }) => {
const { clusterList = [], clusterStatus } = useGlobal();
const [selectedCluster, setSelectedCluster] = useState([]);
const [auths, setAuths] = useState([]);
const onEnrollClick = () => {
if (selectedCluster.length === 0) return;
const newAuths = [...auths]
selectedCluster.forEach((item) => {
if (item.credential_id && !item.agent_credential_id) {
newAuths.push(item)
}
})
setAuths(newAuths)
if (newAuths.length === 0 && typeof onEnroll === "function") {
if (typeof onEnroll === "function") {
onEnroll(selectedCluster.map((item) => item.id));
}
};
useEffect(() => {
setAuths([])
}, [JSON.stringify(selectedCluster)])
return (
<div>
<div
@ -51,27 +37,6 @@ export default ({ onEnroll, loading }) => {
}}
/>
</div>
<SetAgentCredential selectedCluster={selectedCluster} setSelectedCluster={setSelectedCluster}/>
{
auths.length > 0 && (
<Alert style={{ marginTop: 10 }} type="error" message={(
<div>
<div>
{formatMessage({
id: "agent.instance.associate.auth.error",
})}
</div>
<div>
{ auths.map((item) => (
<div key={item.id}>
- {item.name}
</div>
)) }
</div>
</div>
)}/>
)
}
<div style={{ marginTop: 10, textAlign: "right" }}>
<div style={{ marginBottom: 15, color: "rgba(130,129,136,1)" }}>
<span>

View File

@ -228,7 +228,7 @@ export const AgentRowDetail = ({ agentID, t }) => {
})
}
>
<Button style={{padding: 0}} type="link" loading={btnLoading}>
<Button type="link" loading={btnLoading}>
Revoke
</Button>
</Popconfirm>

View File

@ -1,174 +0,0 @@
import request from "@/utils/request";
import { message, Table, Tooltip, Spin } from "antd";
import { useState } from "react";
import { formatMessage } from "umi/locale";
import { MANUAL_VALUE } from "./AgentCredentialForm";
import styles from "./SetAgentCredential.less";
import AgentCredential from "./AgentCredential";
import { ESPrefix } from "@/services/common";
import { cloneDeep } from "lodash";
import { connect } from "dva";
export default connect()((props) => {
const { selectedCluster, setSelectedCluster, dispatch } = props
const [status, setStatus] = useState({});
const [testLoading, setTestLoading] = useState(false);
const onAgentCredentialSave = async (values) => {
const newSelectedCluster = cloneDeep(selectedCluster);
const index = newSelectedCluster.findIndex((item) => item.id === values.id);
if (index !== -1) {
newSelectedCluster[index] = values;
setSelectedCluster(newSelectedCluster);
}
dispatch({
type: "global/fetchClusterList",
payload: {
size: 200,
name: "",
},
});
dispatch({
type: "global/fetchClusterStatus",
})
};
const expandedRowRender = (record) => {
return (
<AgentCredential
record={record}
onAgentCredentialSave={(values) => onAgentCredentialSave(values)}
/>
);
};
const tryConnect = async (values) => {
setTestLoading(true);
const body = {
basic_auth: {
username: values.agent_basic_auth?.username,
password: values.agent_basic_auth?.password,
},
host: values.host,
credential_id:
values.agent_credential_id !== MANUAL_VALUE
? values.agent_credential_id
: undefined,
schema: values.schema || "http",
};
if (
values.credential_id &&
!body.credential_id &&
(!body.basic_auth.username || !body.basic_auth.password)
) {
message.warning(formatMessage({ id: "agent.instance.associate.tips.connected.check" }));
setTestLoading(false);
return;
}
const res = await request(`${ESPrefix}/try_connect`, {
method: "POST",
body,
showErrorInner: true,
}, false, false);
setStatus({
...status,
[values.id]: {
status: res?.status,
error: res?.error,
},
});
setTestLoading(false);
};
return (
<>
<div style={{ marginTop: 32 }}>
<div
style={{
fontSize: 16,
color: "rgba(16, 16, 16, 1)",
fontWeight: 600,
marginBottom: 8,
}}
>
{formatMessage({ id: "agent.instance.associate.set_credential" })}
</div>
<div>{formatMessage({ id: "agent.instance.associate.set_credential.tips" })}</div>
</div>
<div style={{ marginTop: 15 }}>
<Table
size="small"
rowKey={"id"}
dataSource={selectedCluster || []}
className={styles.table}
columns={[
{
title: formatMessage({
id: "agent.instance.associate.labels.cluster_name",
}),
dataIndex: "name",
key: "name",
},
{
title: formatMessage({ id: "guide.cluster.auth" }),
dataIndex: "credential_id",
key: "credential_id",
render: (text, record) => {
return record.credential_id || record.basic_auth?.username
? formatMessage({
id: "cluster.regist.step.complete.tls.yes",
})
: formatMessage({
id: "cluster.regist.step.complete.tls.no",
});
},
},
{
title: formatMessage({ id: "agent.label.agent_credential" }),
dataIndex: "agent_credential_id",
key: "agent_credential_id",
render: (text, record) => {
return record.agent_credential_id ? "Set" : "No set";
},
},
{
title: formatMessage({ id: "alert.rule.table.columnns.status" }),
dataIndex: "status",
key: "Status",
render: (text, record) => {
if (!status[record.id]) return "-";
if (status[record.id].error) {
return (
<Tooltip title={status[record.id].error}>
<span style={{ color: "red" }}>{formatMessage({ id: "alert.rule.table.columnns.status.failed"})}</span>
</Tooltip>
);
}
return (
<span style={{ color: "green" }}>{formatMessage({ id: "alert.rule.table.columnns.status.succeeded"})}</span>
);
},
},
{
title: formatMessage({ id: "table.field.actions" }),
dataIndex: "",
key: "",
render: (record) =>
testLoading ? (
<Spin />
) : (
<a onClick={() => tryConnect(record)}>
{formatMessage({ id: "guide.cluster.test.connection" })}
</a>
),
},
]}
expandedRowRender={expandedRowRender}
/>
</div>
</>
);
});

View File

@ -1,7 +0,0 @@
.table {
:global {
tr.ant-table-expanded-row, tr.ant-table-expanded-row:hover {
background: #fff;
}
}
}

View File

@ -1,35 +1,22 @@
import { useGlobal } from "@/layouts/GlobalContext";
import request from "@/utils/request";
import { Form, Input, Switch, Icon, Button, Alert } from "antd";
import { useEffect, useMemo, useRef, useState } from "react";
import { Form, Input, Switch, Icon, Button, Select } from "antd";
import { useMemo, useRef, useState } from "react";
import { Link, router } from "umi";
import { formatMessage } from "umi/locale";
import ClusterSelect from "@/components/ClusterSelect";
import SetAgentCredential from "./SetAgentCredential";
export default ({ onBatchEnroll, loading }) => {
const { clusterList = [], clusterStatus } = useGlobal();
const [selectedCluster, setSelectedCluster] = useState([]);
const [auths, setAuths] = useState([]);
const onBatchEnrollClick = () => {
if (selectedCluster.length === 0) return;
const newAuths = [...auths]
selectedCluster.forEach((item) => {
if (item.credential_id && !item.agent_credential_id) {
newAuths.push(item)
}
})
setAuths(newAuths)
if (newAuths.length === 0 && typeof onBatchEnroll === "function") {
if (typeof onBatchEnroll === "function") {
onBatchEnroll(selectedCluster.map((item) => item.id));
}
};
useEffect(() => {
setAuths([])
}, [JSON.stringify(selectedCluster)])
return (
<div>
<div
@ -51,27 +38,6 @@ export default ({ onBatchEnroll, loading }) => {
}}
/>
</div>
<SetAgentCredential selectedCluster={selectedCluster} setSelectedCluster={setSelectedCluster}/>
{
auths.length > 0 && (
<Alert style={{ marginTop: 10 }} type="error" message={(
<div>
<div>
{formatMessage({
id: "agent.instance.associate.auth.error",
})}
</div>
<div>
{ auths.map((item) => (
<div key={item.id}>
- {item.name}
</div>
)) }
</div>
</div>
)}/>
)
}
<div style={{ marginTop: 10, textAlign: "right" }}>
<div style={{ marginBottom: 15, color: "rgba(130,129,136,1)" }}>
<span>

View File

@ -1,7 +0,0 @@
.table {
:global {
tr.ant-table-expanded-row, tr.ant-table-expanded-row:hover {
background: #fff;
}
}
}

View File

@ -379,37 +379,6 @@ const AgentList = (props) => {
}
};
const [clearLoading, setClearLoading] = useState(false)
const onClearClick = async ()=>{
setClearLoading(true);
const statusRes = await request(`/instance/_clear`, {
method: "POST",
queryParams: {
"app_name": "agent",
},
});
if(statusRes && statusRes.acknowledged){
message.success("submit successfully");
}
setClearLoading(false);
}
const showClearConfirm = useCallback(() => {
Modal.confirm({
title: formatMessage({ id: "agent.instance.clear.modal.title" }),
content: (
<>
<div>{formatMessage({ id: "agent.instance.clear.modal.desc" })}</div>
</>
),
okText: "Yes",
okType: "danger",
cancelText: "No",
onOk() {
onClearClick();
},
});
}, []);
return (
<PageHeaderWrapper>
<Card>
@ -421,7 +390,7 @@ const AgentList = (props) => {
marginBottom: 15,
}}
>
<div style={{ maxWidth: 450, flex: "1 1 auto" }}>
<div style={{ maxWidth: 500, flex: "1 1 auto" }}>
<Search
allowClear
placeholder="Type keyword to search"
@ -444,9 +413,6 @@ const AgentList = (props) => {
{
hasAuthority("agent.instance:all") && (
<>
<Button loading={clearLoading} onClick={showClearConfirm}>
{formatMessage({ id: "agent.instance.clear.title" })}
</Button>
<Button
type="primary"
onClick={() => {

View File

@ -349,10 +349,10 @@ const Index = (props) => {
},
];
const onTimeChange = ({ start, end, refresh }) => {
const onTimeChange = ({ start, end }) => {
dispatch({
type: "timeChange",
value: { start_time: start, end_time: end, refresh },
value: { start_time: start, end_time: end },
});
};
@ -503,7 +503,6 @@ const Index = (props) => {
delete newQueryParams._t;
delete newQueryParams.start_time;
delete newQueryParams.end_time;
delete newQueryParams.refresh;
return newQueryParams;
}, [JSON.stringify(queryParams)]);
@ -512,8 +511,8 @@ const Index = (props) => {
return { minUpdated: "", maxUpdated: "" };
}
return {
minUpdated: moment(dataSource.aggregations.min_updated?.value).tz(getTimezone()).utc().format(),
maxUpdated: moment(dataSource.aggregations.max_updated?.value).tz(getTimezone()).utc().format(),
minUpdated: moment(dataSource.aggregations.min_updated?.value),
maxUpdated: moment(dataSource.aggregations.max_updated?.value),
};
}, [dataSource.aggregations]);
@ -666,7 +665,7 @@ const Index = (props) => {
onRangeChange={onTimeChange}
{...refresh}
onRefreshChange={setRefresh}
onRefresh={(value) => onTimeChange({ ...(value || {}), refresh: new Date().valueOf()})}
onRefresh={onTimeChange}
timeZone={timeZone}
onTimeZoneChange={setTimeZone}
recentlyUsedRangesKey={'alerting-message'}
@ -682,6 +681,14 @@ const Index = (props) => {
gap: 10,
}}
>
<Button
icon="redo"
onClick={() => {
onRefreshClick();
}}
>
{formatMessage({ id: "form.button.refresh" })}
</Button>
{hasAuthority("alerting.message:all") ? (
<Dropdown overlay={batchMenu}>
<Button type="primary">
@ -708,7 +715,6 @@ const Index = (props) => {
to: maxUpdated,
}}
queryParams={widgetQueryParams}
refresh={queryParams?.refresh}
/>
</div>
<Table

View File

@ -123,6 +123,18 @@ const MessageDetail = (props) => {
recentlyUsedRangesKey={"rule-detail"}
/>
</div>
<Button
onClick={() => {
handleTimeChange({
start: timeRange.min,
end: timeRange.max,
});
}}
icon={"reload"}
type="primary"
>
{formatMessage({ id: "form.button.refresh" })}
</Button>
</div>
<div style={{marginTop: 15,display:"flex", gap: 15, marginBottom:10}}>
<div style={{flex: "1 1 50%"}}>

View File

@ -8,9 +8,6 @@ import EventMessageStatus from "./EventMessageStatus";
export default ({msgItem})=>{
const labelSpan = 6;
const vSpan = 18;
const isBucketDiff = !!(msgItem && msgItem.bucket_conditions)
return (
<Card size={"small"} title={formatMessage({ id: "alert.message.detail.title.event_detail" })}>
<div style={{lineHeight:"2em"}} >
@ -49,18 +46,6 @@ export default ({msgItem})=>{
<Col span={labelSpan}>{formatMessage({ id: "alert.message.table.duration" })}</Col>
<Col span={vSpan}>{moment.duration(msgItem?.duration).humanize()}</Col>
</Row>
<Row>
<Col span={labelSpan}>{formatMessage({ id: "alert.message.detail.condition.type" })}</Col>
<Col span={vSpan}>{isBucketDiff ? formatMessage({id: `alert.rule.form.label.buckets_diff`}) : formatMessage({id: `alert.rule.form.label.metrics_value`})}</Col>
</Row>
{
isBucketDiff && msgItem?.bucket_diff_type ? (
<Row>
<Col span={labelSpan}>{formatMessage({ id: "alert.message.detail.bucket_diff_type" })}</Col>
<Col span={vSpan}>{formatMessage({id: `alert.rule.form.label.${msgItem.bucket_diff_type}`}) }</Col>
</Row>
) : null
}
<Row>
<Col span={labelSpan}>{formatMessage({ id: "alert.message.detail.condition" })}</Col>
<Col span={vSpan}>{msgItem?.hit_condition}</Col>

View File

@ -18,7 +18,7 @@ import { PriorityIconText } from "../../components/Statistic";
import WidgetLoader from "@/pages/DataManagement/View/WidgetLoader";
const Option = Select.Option;
const RuleRecords = ({ ruleID, timeRange, showAertMetric = false, refresh }) => {
const RuleRecords = ({ ruleID, timeRange, showAertMetric = false }) => {
if (!ruleID || !timeRange.min) {
return null;
}
@ -231,7 +231,6 @@ const RuleRecords = ({ ruleID, timeRange, showAertMetric = false, refresh }) =>
priority: queryParams.priority,
state: queryParams.state,
}}
refresh={refresh}
/>
</div>
) : null}

View File

@ -69,20 +69,18 @@ export default Form.create({ name: "rule_form_edit" })((props) => {
const [editValue] = useMemo(() => {
let editValue = value?._source || {};
if (editValue?.metrics && (editValue?.conditions || editValue?.bucket_conditions)) {
if (editValue?.metrics && editValue?.conditions) {
editValue.alert_objects = [
{
name: editValue.name,
metrics: editValue.metrics,
conditions: editValue.conditions,
bucket_conditions: editValue.bucket_conditions,
schedule: editValue.schedule,
},
];
delete editValue.name;
delete editValue.metrics;
delete editValue.conditions;
delete editValue.bucket_conditions;
delete editValue.schedule;
}
return [editValue];

View File

@ -231,20 +231,16 @@ const RuleForm = (props) => {
delete values.alert_objects;
alert_objects = alert_objects.map((alert_object) => {
if (alert_object.conditions) {
alert_object.conditions["operator"] = "any";
alert_object.conditions.items = alert_object.conditions.items.map(
(item) => {
return {
...item,
minimum_period_match: parseInt(item.minimum_period_match),
};
}
);
}
if (alert_object.bucket_conditions?.items) {
alert_object.bucket_conditions.items = alert_object.bucket_conditions.items.filter((item) => !!item.type);
}
alert_object.conditions["operator"] = "any";
alert_object.conditions.items = alert_object.conditions.items.map(
(item) => {
return {
...item,
minimum_period_match: parseInt(item.minimum_period_match),
};
}
);
return { ...values, ...alert_object };
});
return alert_objects;

View File

@ -1,45 +1,13 @@
import { Form, Input, Select, Button, Icon, Radio, InputNumber } from "antd";
import { useCallback, useEffect, useMemo, useState } from "react";
import { Form, Input, Select, Button, Icon } from "antd";
import { useCallback, useMemo, useState } from "react";
import "./form.scss";
import { formatMessage } from "umi/locale";
import { PriorityColor } from "../utils/constants";
import { cloneDeep } from "lodash";
const { Option } = Select;
const InputGroup = Input.Group;
const lastsPeriods = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15]
const operators = ['equals', 'gte', 'gt', 'lt', 'lte', 'range']
const FormAlertCondition = (props) => {
const { conditions, bucketConditions } = props;
const [type, setType] = useState('metrics_value')
useEffect(() => {
if (bucketConditions?.items?.length > 0) {
setType('buckets_diff')
}
}, [JSON.stringify(conditions), JSON.stringify(bucketConditions)])
return (
<>
<Radio.Group value={type} onChange={(e) => setType(e.target.value)}>
{
['metrics_value', 'buckets_diff'].map((item) => (
<Radio.Button key={item} value={item}>{formatMessage({
id: `alert.rule.form.label.${item}`,
})}</Radio.Button>
))
}
</Radio.Group>
{ type === 'metrics_value' ? <MetricsValue {...props} /> : <BucketsDiff {...props} /> }
</>
)
};
export default FormAlertCondition;
const MetricsValue = (props) => {
const { getFieldDecorator } = props.form;
const alertObjectIndex = props.alertObjectIndex || 0;
const conditions = props.conditions || {};
@ -104,18 +72,95 @@ const MetricsValue = (props) => {
props.onPreviewChartChange();
}}
>
{
lastsPeriods.map((item) => (
<Option key={`${item}`} value={`${item}`}>
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: item }
)}
</Option>
))
}
<Option value="1">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 1 }
)}
</Option>
<Option value="2">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 2 }
)}
</Option>
<Option value="3">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 3 }
)}
</Option>
<Option value="4">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 4 }
)}
</Option>
<Option value="5">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 5 }
)}
</Option>
<Option value="6">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 6 }
)}
</Option>
<Option value="7">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 7 }
)}
</Option>
<Option value="8">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 8 }
)}
</Option>
<Option value="9">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 9 }
)}
</Option>
<Option value="10">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 10 }
)}
</Option>
<Option value="15">
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 15 }
)}
</Option>
</Select>
)}
</Form.Item>
@ -142,7 +187,12 @@ const MetricsValue = (props) => {
setOperatorState({ ...operatorState, [`op${i}`]: value });
}}
>
{ operators.map((item) => <Option key={item} value={item}>{item}</Option>)}
<Option value="equals">equals</Option>
<Option value="gte">gte</Option>
<Option value="gt">gt</Option>
<Option value="lt">lt</Option>
<Option value="lte">lte</Option>
<Option value="range">range</Option>
</Select>
)}
</Form.Item>
@ -317,326 +367,6 @@ const MetricsValue = (props) => {
})}
</div>
);
}
};
const BucketsDiff = (props) => {
const { getFieldDecorator } = props.form;
const alertObjectIndex = props.alertObjectIndex || 0;
const conditions = props.bucketConditions || {};
const [conditionItems, setConditionItems] = useState(conditions?.items || [{ type: 'size' }]);
return (
<div className="group-wrapper">
{conditionItems.map((conditionItem, i) => {
return (
<div key={i}>
<InputGroup compact>
<Form.Item>
{getFieldDecorator(
`alert_objects[${alertObjectIndex}][bucket_conditions][items][${i}][type]`,
{
initialValue: conditionItem.type || "size",
}
)(
<Select style={{ width: 120 }} onChange={(value) => {
const newItems = cloneDeep(conditionItems)
newItems[i].type = value
if (value === 'content') {
newItems[i].values = undefined
newItems[i].operator = undefined
}
setConditionItems(newItems)
}}>
<Option value={'size'}>{formatMessage({id: `alert.rule.form.label.size`})}</Option>
<Option value={'content'}>{formatMessage({id: `alert.rule.form.label.content`})}</Option>
</Select>
)}
</Form.Item>
<Form.Item>
<Input
style={{
width: 40,
textAlign: "center",
pointerEvents: "none",
backgroundColor: "#fafafa",
color: "rgba(0, 0, 0, 0.65)",
}}
defaultValue={formatMessage({
id: `alert.rule.form.label.in`,
})}
disabled
/>
</Form.Item>
<Form.Item>
{getFieldDecorator(
`alert_objects[${alertObjectIndex}][bucket_conditions][items][${i}][bucket_count]`,
{
initialValue: conditionItem.bucket_count || 10,
rules: [
{
required: true,
message: "Please select period!",
},
],
}
)(
<InputNumber style={{ width: 60 }} min={2} max={50} precision={0} step={1}/>
)}
</Form.Item>
<Form.Item>
<Input
style={{
width: 100,
textAlign: "center",
pointerEvents: "none",
backgroundColor: "#fafafa",
color: "rgba(0, 0, 0, 0.65)",
}}
defaultValue={formatMessage({
id: `alert.rule.form.label.stat_period`,
})}
disabled
/>
</Form.Item>
<Form.Item>
{getFieldDecorator(
`alert_objects[${alertObjectIndex}][bucket_conditions][items][${i}][minimum_period_match]`,
{
initialValue: conditionItem.minimum_period_match || 1,
rules: [
{
required: true,
message: "Please select periods match!",
},
],
}
)(
<Select
allowClear
showSearch
style={{ width: 140 }}
placeholder={formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: 1 }
)}
onChange={(value) => {
props.onPreviewChartChange();
}}
>
{
lastsPeriods.map((item) => (
<Option key={item} value={item}>
{formatMessage(
{
id: "alert.rule.form.label.lasts_periods",
},
{ num: item }
)}
</Option>
))
}
</Select>
)}
</Form.Item>
<>
<Form.Item>
{getFieldDecorator(
`alert_objects[${alertObjectIndex}][bucket_conditions][items][${i}][operator]`,
{
initialValue: conditionItem.operator,
rules: [
{
required: true,
message: "Please select operator!",
},
],
}
)(
<Select
allowClear
showSearch
style={{ width: 80 }}
placeholder={"equals"}
onChange={(value) => {
props.onPreviewChartChange();
const newItems = cloneDeep(conditionItems)
newItems[i].operator = value
setConditionItems(newItems)
}}
>
{ operators.map((item) => <Option key={item} value={item}>{item}</Option>)}
</Select>
)}
</Form.Item>
{conditionItem.operator === "range" ? (
<>
<Form.Item>
{getFieldDecorator(
`alert_objects[${alertObjectIndex}][bucket_conditions][items][${i}][values][0]`,
{
initialValue: conditionItem.values?.[0],
rules: [
{
required: true,
message: "Please input min value!",
},
],
}
)(
<Input
style={{ width: 80 }}
placeholder="min value"
onChange={(e) => {
props.onPreviewChartChange();
}}
/>
)}
</Form.Item>
<span
style={{
display: "inline-block",
lineHeight: "40px",
textAlign: "center",
}}
>
<Icon type="minus" />
</span>
<Form.Item>
{getFieldDecorator(
`alert_objects[${alertObjectIndex}][bucket_conditions][items][${i}][values][1]`,
{
initialValue: conditionItem.values?.[1],
rules: [
{
required: true,
message: "Please input max value!",
},
],
}
)(
<Input
style={{ width: 80 }}
placeholder="max value"
onChange={(e) => {
props.onPreviewChartChange();
}}
/>
)}
</Form.Item>
</>
) : (
<Form.Item>
{getFieldDecorator(
`alert_objects[${alertObjectIndex}][bucket_conditions][items][${i}][values][0]`,
{
initialValue: conditionItem.values?.[0],
rules: [
{
required: true,
message: "Please input value!",
},
],
}
)(
<Input
style={{ width: 80 }}
placeholder="value"
onChange={(e) => {
props.onPreviewChartChange();
}}
/>
)}
</Form.Item>
)}
</>
<Form.Item>
<Input
style={{
width: 80,
textAlign: "center",
pointerEvents: "none",
backgroundColor: "#fafafa",
color: "rgba(0, 0, 0, 0.65)",
}}
defaultValue={formatMessage({
id: "alert.rule.form.label.trigger",
})}
disabled
/>
</Form.Item>
<Form.Item>
{getFieldDecorator(
`alert_objects[${alertObjectIndex}][bucket_conditions][items][${i}][priority]`,
{
initialValue: conditionItem.priority,
rules: [
{
required: true,
message: "Please select priority!",
},
],
}
)(
<Select
allowClear
showSearch
style={{ width: 120 }}
placeholder={"P1(High)"}
onChange={(value) => {
props.onPreviewChartChange();
}}
>
{Object.keys(PriorityColor).map((item) => {
return (
<Option key={item} value={item}>
{formatMessage({
id: `alert.message.priority.${item}`,
})}
</Option>
);
})}
</Select>
)}
</Form.Item>
{conditionItems.length > 1 && i > 0 ? (
<Icon
className="dynamic-delete-button"
type="close-circle"
onClick={() => {
setConditionItems(conditionItems.filter((_, key) => key !== i));
}}
/>
) : null}
{i == 0 ? (
<Form.Item>
<Button
type="primary"
icon="plus"
onClick={() => {
if (conditionItems.length >= 5) {
return;
}
setConditionItems([...conditionItems, { type: 'size' }]);
}}
size="small"
style={{ marginLeft: 10 }}
disabled={conditionItems.length >= 5 ? true : false}
>
{formatMessage({
id: "alert.rule.form.button.add_condition",
})}
</Button>
</Form.Item>
) : null}
</InputGroup>
</div>
)
})}
</div>
);
}
export default FormAlertCondition;

View File

@ -106,9 +106,7 @@ const FormAlertObject = (props) => {
statPeriod={props.statPeriod}
/>
</Form.Item>
<Form.Item label={formatMessage({
id: "alert.rule.form.label.bucket_label_template",
})}>
<Form.Item label="Bucket Label Template">
<FormBucketLabel form={props.form} alertObjectIndex={i} initialValue={item?.metrics?.bucket_label || {}} />
</Form.Item>
<Form.Item
@ -120,8 +118,7 @@ const FormAlertObject = (props) => {
<FormAlertCondition
form={props.form}
alertObjectIndex={i}
conditions={item?.conditions}
bucketConditions={item?.bucket_conditions}
conditions={item?.conditions || {}}
onPreviewChartChange={props.onPreviewChartChange}
/>
</Form.Item>

View File

@ -12,7 +12,7 @@ import moment from "moment";
import { Link } from "umi";
const Option = Select.Option;
export default ({ ruleID, timeRange, refresh }) => {
export default ({ ruleID, timeRange }) => {
const initialQueryParams = {
from: 0,
size: 10,
@ -261,7 +261,6 @@ export default ({ ruleID, timeRange, refresh }) => {
priority: queryParams.priority,
status: queryParams.status,
}}
refresh={refresh}
/>
</div>
<Table

View File

@ -94,7 +94,6 @@ const RuleCard = ({ ruleID, data = {} }) => {
);
};
const clusters = useGlobalClusters();
const isBucketDiff = !!(data && data.bucket_conditions)
return (
<div>
@ -159,16 +158,10 @@ const RuleCard = ({ ruleID, data = {} }) => {
<span style={{ wordBreak: "break-all" }}>{data?.expression}</span>
</Col>
</Row>
<Row style={{ marginBottom: 10}}>
<Col span={6}>{formatMessage({ id: "alert.rule.table.columnns.condition.type" })}</Col>
<Col span={18}>
{isBucketDiff ? formatMessage({id: `alert.rule.form.label.buckets_diff`}) : formatMessage({id: `alert.rule.form.label.metrics_value`})}
</Col>
</Row>
<Row style={{ marginBottom: 30 }}>
<Col span={6}>{formatMessage({ id: "alert.rule.table.columnns.condition" })}</Col>
<Col span={6}>Condition</Col>
<Col span={18}>
<Conditions items={isBucketDiff ? data.bucket_conditions?.items : data.conditions?.items} />
<Conditions items={data.conditions?.items} />
</Col>
</Row>
</Card>
@ -180,9 +173,6 @@ const Conditions = ({ items }) => {
return (items || []).map((item) => {
let operator = "";
switch (item.operator) {
case "equals":
operator = "=";
break;
case "gte":
operator = ">=";
break;
@ -195,29 +185,11 @@ const Conditions = ({ items }) => {
case "lte":
operator = "<=";
break;
case "range":
operator = "range";
break;
}
return (
<div key={item.priority} style={{ marginBottom: 10 }}>
{item.type && (<span style={{ marginRight: 15 }}>{formatMessage({id: `alert.rule.form.label.${item.type}`})}</span>)}
{
operator === 'range' ? (
<>
<span>{`>=`}</span>
<span style={{ marginRight: 4 }}>{item.values[0]}</span>
<span style={{ marginRight: 4 }}>{`&`}</span>
<span>{`<=`}</span>
<span style={{ marginRight: 15 }}>{item.values[1]}</span>
</>
) : (
<>
<span>{operator} </span>
<span style={{ marginRight: 15 }}>{item.values[0]}</span>
</>
)
}
<span>{operator} </span>
<span style={{ marginRight: 15 }}>{item.values[0]}</span>
<PriorityIconText priority={item.priority} />
</div>
);

View File

@ -134,7 +134,7 @@ const RuleDetail = (props) => {
setParam({ ...param, timeRange: state.timeRange });
}, [state.timeRange]);
const handleTimeChange = ({ start, end, refresh }) => {
const handleTimeChange = ({ start, end }) => {
setState({
...state,
spinning: true,
@ -143,7 +143,6 @@ const RuleDetail = (props) => {
max: end,
timeFormatter: formatter.dates(1),
},
refresh: refresh || state.refresh
});
};
@ -333,12 +332,24 @@ const RuleDetail = (props) => {
onRangeChange={handleTimeChange}
{...refresh}
onRefreshChange={setRefresh}
onRefresh={(value) => handleTimeChange({ ...(value || {}), refresh: new Date().valueOf()})}
onRefresh={handleTimeChange}
timeZone={timeZone}
onTimeZoneChange={setTimeZone}
recentlyUsedRangesKey={"rule-detail"}
/>
</div>
<Button
onClick={() => {
handleTimeChange({
start: state.timeRange.min,
end: state.timeRange.max,
});
}}
icon={"reload"}
type="primary"
>
{formatMessage({ id: "form.button.refresh" })}
</Button>
</div>
<div style={{ display: "flex", gap: 15, marginBottom: 20 }}>
<div style={{ flex: "1 1 50%" }}>
@ -368,7 +379,6 @@ const RuleDetail = (props) => {
from: state.timeRange.min,
to: state.timeRange.max,
}}
refresh={state.refresh}
/>
) : (
<Empty />
@ -390,7 +400,6 @@ const RuleDetail = (props) => {
to: state.timeRange.max,
}}
queryParams={{ rule_id: ruleID }}
refresh={state.refresh}
/>
</Card>
</div>
@ -410,7 +419,7 @@ const RuleDetail = (props) => {
key="alerts"
tab={formatMessage({ id: "alert.rule.detail.title.alert_event" })}
>
<MessageRecord ruleID={ruleID} timeRange={state.timeRange} refresh={state.refresh}/>
<MessageRecord ruleID={ruleID} timeRange={state.timeRange} />
</Tabs.TabPane>
<Tabs.TabPane
key="history"
@ -420,7 +429,6 @@ const RuleDetail = (props) => {
ruleID={ruleID}
timeRange={state.timeRange}
showAertMetric={true}
refresh={state.refresh}
/>
</Tabs.TabPane>
</Tabs>

View File

@ -178,9 +178,6 @@ const Discover = (props) => {
field: "",
enabled: false,
};
const [histogramVisible, setHistogramVisible] = useState(false)
const [distinctParams, setDistinctParams] = React.useState(
distinctParamsDefault
);
@ -1125,7 +1122,7 @@ const Discover = (props) => {
getVisualizations={() => visRef?.current?.getVisualizations()}
searchInfo={{
took,
total: hits,
hits,
...timeChartProps,
}}
selectedQueriesId={selectedQueriesId}
@ -1163,12 +1160,6 @@ const Discover = (props) => {
}
}}
showLayoutListIcon={false}
histogramProps={{
histogramData,
onHistogramToggle: () => {
setHistogramVisible(!histogramVisible)
},
}}
// viewLayout={viewLayout}
// onViewLayoutChange={(layout) => {
// if (layout) {
@ -1315,31 +1306,6 @@ const Discover = (props) => {
responsive={false}
style={{ position: "relative" }}
>
{histogramVisible && opts.timefield && (
<EuiFlexItem>
<section
aria-label={"Histogram of found documents"}
className="dscTimechart"
>
{opts.chartAggConfigs &&
histogramData &&
records.length !== 0 && (
<div
className="dscHistogramGrid"
data-test-subj="discoverChart"
>
<DiscoverHistogram
chartData={histogramData}
timefilterUpdateHandler={
timefilterUpdateHandler
}
/>
</div>
)}
</section>
<EuiSpacer size="s" />
</EuiFlexItem>
)}
<EuiFlexItem className="eui-yScroll">
<section
className="dscTable eui-yScroll"

View File

@ -744,7 +744,6 @@ class Index extends PureComponent {
onChangeDeleteIndexConfirmState={this.onChangeDeleteIndexConfirmState}
deleteIndexConfirm={this.state.deleteIndexConfirm}
items={this.state.deleteIndexItems}
selectedCluster={this.props.selectedCluster}
/>
</PageHeaderWrapper>
);

View File

@ -1,15 +0,0 @@
import { DiscoverHistogram } from "@/components/vendor/discover/public/application/components/histogram/histogram";
import { Icon, Popover } from "antd"
import { useEffect, useRef, useState } from "react";
import styles from "./index.less";
export default (props) => {
const { onHistogramToggle } = props
return (
<Icon type="bar-chart" title="show histogram" style={{color: '#006BB4', cursor: 'pointer'}} onClick={() => {
onHistogramToggle()
}}/>
)
}

View File

@ -1,9 +0,0 @@
.histogram {
z-index: 1;
:global {
.ant-popover-inner-content {
width: 400px;
padding: 0;
}
}
}

View File

@ -5,9 +5,9 @@ import InsightConfig, { ISearchConfig } from "../InsightConfig";
import styles from './index.less';
import { create, list, remove, update } from "../services/queries";
import FullScreen from "../FullScreen";
import ModeHandler from "../ModeHandler";
import { Icon, message } from "antd";
import SearchInfo from "../SearchInfo";
import Histogram from "../Histogram";
import ViewLayout from "../ViewLayout";
export interface IQueries {
@ -72,8 +72,7 @@ export default forwardRef((props: IProps, ref: any) => {
onSearchConfigChange,
showLayoutListIcon,
viewLayout,
onViewLayoutChange,
histogramProps = {}
onViewLayoutChange
} = props;
const {
@ -184,7 +183,6 @@ export default forwardRef((props: IProps, ref: any) => {
return (
<div className={styles.bar}>
<SearchInfo {...searchInfo} loading={searchLoading}/>
{ histogramProps?.histogramData && <Histogram {...histogramProps}/>}
<SaveQueries
tags={tags}
onTagsChange={setTags}

View File

@ -30,7 +30,7 @@ export interface IProps {
* selected interval
*/
stateInterval: string;
total: number;
hits: number;
took?: number;
}
@ -39,7 +39,7 @@ export default ({
dateFormat,
timeRange,
stateInterval,
total,
hits,
took,
}: IProps) => {
const [interval, setInterval] = useState(stateInterval);
@ -69,7 +69,7 @@ export default ({
>
<EuiFlexItem grow={false}>
<div style={{ fontSize: 12}}>
Found <span style={{fontWeight: "bold" }}>{total}</span>{" "}
Found <span style={{fontWeight: "bold" }}>{hits}</span>{" "}
records {took && (
<span style={{marginLeft: 5 }}>
({took} milliscond)

View File

@ -1,40 +1,17 @@
import { Icon, Popover } from "antd"
import { useEffect, useRef, useState } from "react";
import { useState } from "react";
import Info, { IProps } from "./Info";
import styles from './index.scss';
export default (props: IProps & { loading: boolean }) => {
const { loading, total } = props
const [showResultCount, setShowResultCount] = useState(true);
const timerRef = useRef(null)
const autoHiddenRef = useRef(true)
useEffect(() => {
if (timerRef.current) {
clearTimeout(timerRef.current)
}
if (showResultCount) {
timerRef.current = setTimeout(() => {
if (autoHiddenRef.current) {
setShowResultCount(false)
}
}, 3000);
}
}, [showResultCount])
useEffect(() => {
if (loading) {
autoHiddenRef.current = true
}
}, [loading])
if (typeof total !== 'number' || total <= 0) return null;
if (typeof props.hits !== 'number' || props.hits <= 0) return null;
return (
<Popover
visible={!loading && showResultCount}
visible={!props.loading && showResultCount}
placement="left"
title={null}
overlayClassName={styles.searchInfo}
@ -44,14 +21,7 @@ export default (props: IProps & { loading: boolean }) => {
dateFormat={"YYYY-MM-DD H:mm"}
/>
)}>
<Icon type="info-circle" style={{color: '#006BB4', cursor: 'pointer'}} onClick={() => {
if (showResultCount) {
autoHiddenRef.current = true
} else {
autoHiddenRef.current = false
}
setShowResultCount(!showResultCount)
}}/>
<Icon type="info-circle" style={{color: '#006BB4', cursor: 'pointer'}} onClick={() => setShowResultCount(!showResultCount)}/>
</Popover>
)
}

View File

@ -54,8 +54,7 @@ export default (props) => {
isEdit,
fetchParamsCache,
handleContextMenu,
isFullScreen,
onResultChange
isFullScreen
} = props;
const { series = [] } = record;
@ -100,7 +99,6 @@ export default (props) => {
if (!refresh) {
setLoading(true)
setData()
onResultChange && onResultChange()
}
if (isTimeSeries && !range) {
@ -191,10 +189,8 @@ export default (props) => {
}
res.hits.hits = res.hits.hits || [];
setData(res.hits)
onResultChange && onResultChange(res)
}
} else {
const index_pattern = indices.join(',')
const bodys = metrics.map((item) => {
const { groups = [] } = item;
let newGroups = cloneDeep(groups);
@ -208,7 +204,7 @@ export default (props) => {
return {
cluster_id,
filter,
index_pattern,
index_pattern: indices.join(','),
time_field: time_field,
...item,
items: item.items || [],
@ -223,7 +219,6 @@ export default (props) => {
if (res) {
if (res.some((item) => item.status === 403)) {
setData({ error: 403 })
onResultChange && onResultChange()
setLoading(false);
return;
}
@ -274,15 +269,8 @@ export default (props) => {
}
}
setData(newData)
onResultChange && onResultChange(res.map((item) => (
{
...item,
request: item.request ? `GET ${index_pattern}/_search\n${item.request}` : undefined
}
)))
} else {
setData([])
onResultChange && onResultChange()
}
}
setLoading(false);
@ -297,7 +285,7 @@ export default (props) => {
[timeFieldName] : {
...(bounds.min && { gte: bounds.min.toISOString() }),
...(bounds.max && { lte: bounds.max.toISOString() }),
// format: 'strict_date_optional_time',
format: 'strict_date_optional_time',
}
}
}

View File

@ -41,8 +41,7 @@ export default (props) => {
queriesBarParams,
isFullScreen,
hideHeader,
displayOptions={},
onResultChange,
displayOptions={}
} = props;
const [cacheRecord, setCacheRecord] = useState(record)
@ -252,8 +251,7 @@ export default (props) => {
fetchParamsCache={fetchParamsCacheRef}
queriesBarParams={queriesBarParams}
handleContextMenu={handleContextMenu}
isFullScreen={isFullScreen}
onResultChange={onResultChange}
isFullScreen={isFullScreen}
/>
</Spin>
<WidgetConfigDrawer

View File

@ -41,7 +41,7 @@ export default (props) => {
to = bounds.max;
}
if (!from || !to) return data
const newData = cloneDeep(data.sort((a, b) => a.timestamp - b.timestamp))
const newData = cloneDeep(data)
const fromTimestamp = moment(from).valueOf();
const toTimestamp = moment(to).valueOf();
let start = newData[0].timestamp;

Some files were not shown because too many files have changed in this diff Show More