update alerting api

parent bfd79d751b
commit 9f9ed6894f
@@ -11,7 +11,6 @@ import (
 
 type Metric struct {
 	PeriodInterval string `json:"period_interval"`
-	MaxPeriods int `json:"max_periods"`
 	Items []MetricItem `json:"items"`
 	Formula string `json:"formula,omitempty"`
 	Expression string `json:"expression" elastic_mapping:"expression:{type:keyword,copy_to:search_text}"` // alert expression, auto-generated, e.g. avg(cpu) > 80
@@ -61,7 +61,6 @@ func TestCreateRule( t *testing.T) {
 
 		Metrics: Metric{
 			PeriodInterval: "1m",
-			MaxPeriods: 15,
 			Items: []MetricItem{
 				{Name: "a", Field: "payload.elasticsearch.node_stats.fs.total.free_in_bytes", Statistic: "min", Group: []string{"metadata.labels.cluster_id", "metadata.labels.node_id"}},
 				{Name: "b", Field: "payload.elasticsearch.node_stats.fs.total.total_in_bytes", Statistic: "max", Group: []string{"metadata.labels.cluster_id", "metadata.labels.node_id"}},
@@ -215,7 +215,7 @@ func (h *AlertAPI) getAlertMessage(w http.ResponseWriter, req *http.Request, ps
 	exists, err = orm.Get(rule)
 	if !exists || err != nil {
 		log.Error(err)
-		h.WriteError(w, fmt.Sprintf("rule[%s] not found", rule.ID), http.StatusInternalServerError)
+		h.WriteError(w, fmt.Sprintf("rule [%s] not found", rule.ID), http.StatusInternalServerError)
 		return
 	}
 	metricExpression, _ := rule.Metrics.GenerateExpression()
@@ -7,13 +7,16 @@ package alerting
 import (
 	"fmt"
 	log "github.com/cihub/seelog"
+	"github.com/r3labs/diff/v2"
 	"infini.sh/console/model/alerting"
 	alerting2 "infini.sh/console/service/alerting"
 	_ "infini.sh/console/service/alerting/elasticsearch"
 	httprouter "infini.sh/framework/core/api/router"
 	"infini.sh/framework/core/elastic"
+	"infini.sh/framework/core/event"
 	"infini.sh/framework/core/kv"
 	"infini.sh/framework/core/orm"
+	"infini.sh/framework/core/queue"
 	"infini.sh/framework/core/task"
 	"infini.sh/framework/core/util"
 	"infini.sh/framework/modules/elastic/api"
@@ -55,7 +58,6 @@ func (alertAPI *AlertAPI) createRule(w http.ResponseWriter, req *http.Request, p
 		ids = append(ids, rule.ID)
 		rule.Created = time.Now()
 		rule.Updated = time.Now()
-		rule.Metrics.MaxPeriods = 15
 		if rule.Schedule.Interval == ""{
 			rule.Schedule.Interval = "1m"
 		}
@@ -68,6 +70,11 @@ func (alertAPI *AlertAPI) createRule(w http.ResponseWriter, req *http.Request, p
 			}, http.StatusInternalServerError)
 			return
 		}
+		saveAlertActivity("alerting_rule_change", "create", util.MapStr{
+			"cluster_id": rule.Resource.ID,
+			"rule_id": rule.ID,
+			"cluster_name": rule.Resource.Name,
+		},nil, &rule)
 		eng := alerting2.GetEngine(rule.Resource.Type)
 		if rule.Enabled {
 			ruleTask := task.ScheduleTask{
@@ -189,12 +196,55 @@ func (alertAPI *AlertAPI) getRuleDetail(w http.ResponseWriter, req *http.Request
 
 }
 
+func saveActivity(activityInfo *event.Activity){
+	queueConfig := queue.GetOrInitConfig("platform##activities")
+	if queueConfig.Labels == nil {
+		queueConfig.Labels = map[string]interface{}{
+			"type": "platform",
+			"name": "activity",
+			"category": "elasticsearch",
+			"activity": true,
+		}
+	}
+	err := queue.Push(queueConfig, util.MustToJSONBytes(event.Event{
+		Timestamp: time.Now(),
+		Metadata: event.EventMetadata{
+			Category: "elasticsearch",
+			Name: "activity",
+		},
+		Fields: util.MapStr{
+			"activity": activityInfo,
+		}}))
+	if err != nil {
+		log.Error(err)
+	}
+}
+
+func saveAlertActivity(name, typ string, labels map[string]interface{}, changelog diff.Changelog, oldState interface{}){
+	activityInfo := &event.Activity{
+		ID: util.GetUUID(),
+		Timestamp: time.Now(),
+		Metadata: event.ActivityMetadata{
+			Category: "elasticsearch",
+			Group: "platform",
+			Name: name,
+			Type: typ,
+			Labels: labels,
+		},
+		Changelog: changelog,
+		Fields: util.MapStr{
+			"rule": oldState,
+		},
+	}
+	saveActivity(activityInfo)
+}
+
 func (alertAPI *AlertAPI) updateRule(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
 	id := ps.MustGetParameter("rule_id")
-	obj := &alerting.Rule{}
+	oldRule := &alerting.Rule{}
 
-	obj.ID = id
-	exists, err := orm.Get(obj)
+	oldRule.ID = id
+	exists, err := orm.Get(oldRule)
 	if !exists || err != nil {
 		log.Error(err)
 		alertAPI.WriteJSON(w, util.MapStr{
@@ -204,35 +254,46 @@ func (alertAPI *AlertAPI) updateRule(w http.ResponseWriter, req *http.Request, p
 		return
 	}
 
-	id = obj.ID
-	create := obj.Created
-	obj = &alerting.Rule{}
-	err = alertAPI.DecodeJSON(req, obj)
+	id = oldRule.ID
+	create := oldRule.Created
+	rule := &alerting.Rule{
+	}
+	err = alertAPI.DecodeJSON(req, rule)
 	if err != nil {
 		alertAPI.WriteError(w, err.Error(), http.StatusInternalServerError)
 		log.Error(err)
 		return
 	}
+	rule.Metrics.Expression, err = rule.Metrics.GenerateExpression()
+	if err != nil {
+		alertAPI.WriteError(w, err.Error(), http.StatusInternalServerError)
+		log.Error(err)
+		return
+	}
+	changeLog, err := util.DiffTwoObject(oldRule, rule)
+	if err != nil {
+		log.Error(err)
+	}
 
 	//protect
-	obj.ID = id
-	obj.Created = create
-	obj.Updated = time.Now()
-	obj.Metrics.Expression, err = obj.Metrics.GenerateExpression()
-	if err != nil {
-		alertAPI.WriteError(w, err.Error(), http.StatusInternalServerError)
-		log.Error(err)
-		return
-	}
-	err = orm.Update(obj)
-	if err != nil {
-		alertAPI.WriteError(w, err.Error(), http.StatusInternalServerError)
-		log.Error(err)
-		return
-	}
+	rule.ID = id
+	rule.Created = create
+	rule.Updated = time.Now()
 
-	if obj.Enabled {
-		exists, err = checkResourceExists(obj)
+	err = orm.Update(rule)
+	if err != nil {
+		alertAPI.WriteError(w, err.Error(), http.StatusInternalServerError)
+		log.Error(err)
+		return
+	}
+	saveAlertActivity("alerting_rule_change", "update", util.MapStr{
+		"cluster_id": rule.Resource.ID,
+		"rule_id": rule.ID,
+		"cluster_name": rule.Resource.Name,
+	},changeLog, oldRule)
+
+	if rule.Enabled {
+		exists, err = checkResourceExists(rule)
 		if err != nil || !exists {
 			log.Error(err)
 			alertAPI.WriteJSON(w, util.MapStr{
@@ -242,22 +303,22 @@ func (alertAPI *AlertAPI) updateRule(w http.ResponseWriter, req *http.Request, p
 		}
 		//update task
 		task.StopTask(id)
-		eng := alerting2.GetEngine(obj.Resource.Type)
+		eng := alerting2.GetEngine(rule.Resource.Type)
 		ruleTask := task.ScheduleTask{
-			ID: obj.ID,
-			Interval: obj.Schedule.Interval,
-			Description: obj.Metrics.Expression,
-			Task: eng.GenerateTask(obj),
+			ID: rule.ID,
+			Interval: rule.Schedule.Interval,
+			Description: rule.Metrics.Expression,
+			Task: eng.GenerateTask(rule),
 		}
 		task.RegisterScheduleTask(ruleTask)
 		task.StartTask(ruleTask.ID)
 	}else{
 		task.DeleteTask(id)
 	}
-	clearKV(obj.ID)
+	clearKV(rule.ID)
 
 	alertAPI.WriteJSON(w, util.MapStr{
-		"_id": obj.ID,
+		"_id": rule.ID,
 		"result": "updated",
 	}, 200)
 }
@@ -289,6 +350,11 @@ func (alertAPI *AlertAPI) deleteRule(w http.ResponseWriter, req *http.Request, p
 		log.Error(err)
 		return
 	}
+	saveAlertActivity("alerting_rule_change", "delete", util.MapStr{
+		"cluster_id": obj.Resource.ID,
+		"rule_id": obj.ID,
+		"cluster_name": obj.Resource.Name,
+	},nil, &obj)
 	task.DeleteTask(obj.ID)
 	clearKV(obj.ID)
 
@@ -654,8 +720,6 @@ func getRuleMetricData( rule *alerting.Rule, filterParam *alerting.FilterParam)
 		if len(md.Data) == 0 {
 			continue
 		}
-		//filteredMetricData = append(filteredMetricData, md)
-
 		targetData := md.Data["result"]
 		if len(rule.Metrics.Items) == 1 {
 			for k, _ := range md.Data {
@@ -55,7 +55,6 @@ func TestEngine( t *testing.T) {
 
 		Metrics: alerting.Metric{
 			PeriodInterval: "1m",
-			MaxPeriods: 15,
 			Items: []alerting.MetricItem{
 				{Name: "a", Field: "payload.elasticsearch.node_stats.fs.total.free_in_bytes", Statistic: "min", Group: []string{"metadata.labels.cluster_id", "metadata.labels.node_id"}},
 				{Name: "b", Field: "payload.elasticsearch.node_stats.fs.total.total_in_bytes", Statistic: "max", Group: []string{"metadata.labels.cluster_id", "metadata.labels.node_id"}},
@@ -204,7 +203,6 @@ func TestGeneratePercentilesAggQuery(t *testing.T) {
 
 		Metrics: alerting.Metric{
 			PeriodInterval: "1m",
-			MaxPeriods: 15,
 			Items: []alerting.MetricItem{
 				{Name: "a", Field: "payload.elasticsearch.index_stats.total.search.query_total", Statistic: "rate", Group: []string{"metadata.labels.cluster_id"}},
 				{Name: "b", Field: "payload.elasticsearch.index_stats.total.search.query_time_in_millis", Statistic: "rate", Group: []string{"metadata.labels.cluster_id"}},
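
For reference, a minimal sketch (not part of the diff itself) of how the activity-logging helpers introduced in this commit are intended to be used from a rule handler in the same package; the label values below are illustrative assumptions:

	// Illustrative usage of saveAlertActivity -> saveActivity -> queue.Push.
	// changeLog holds the field-level differences between the stored rule and the incoming update.
	changeLog, err := util.DiffTwoObject(oldRule, rule)
	if err != nil {
		log.Error(err)
	}
	saveAlertActivity("alerting_rule_change", "update", util.MapStr{
		"cluster_id":   rule.Resource.ID,
		"rule_id":      rule.ID,
		"cluster_name": rule.Resource.Name,
	}, changeLog, oldRule) // recorded as an event.Event on the "platform##activities" queue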