add alerting overview and sync cluster id
parent 4178e77160
commit 220d990953

@@ -63,6 +63,7 @@ func Init(cfg *config.AppConfig) {
ui.HandleUIMethod(api.GET, "/elasticsearch/:id/alerting/_settings", alerting.GetSettings)
ui.HandleUIMethod(api.POST, "/elasticsearch/:id/alerting/_indices", alerting.GetIndices)
ui.HandleUIMethod(api.POST, "/elasticsearch/:id/alerting/_aliases", alerting.GetAliases)
ui.HandleUIMethod(api.POST, "/elasticsearch/:id/alerting/_mappings", alerting.GetMappings)
ui.HandleUIMethod(api.POST, "/elasticsearch/:id/alerting/monitors/_search", alerting.Search)
ui.HandleUIMethod(api.GET, "/elasticsearch/:id/alerting/alerts", alerting.GetAlerts)
ui.HandleUIMethod(api.POST, "/elasticsearch/:id/alerting/_monitors/:monitorID/_acknowledge/alerts", alerting.AcknowledgeAlerts)

@@ -1,7 +1,8 @@
package alerting

type Alert struct {
ClusterID string `json:"cluster_id"`
ClusterID string `json:"cluster_id" elastic_mapping:"cluster_id:{type:keyword}"`
ClusterName string `json:"cluster_name" elastic_mapping:"cluster_name:{type:text}"`
AcknowledgedTime *int64 `json:"acknowledged_time" elastic_mapping:"acknowledged_time:{type:date}"`
ActionExecutionResults []ActionExecutionResult `json:"action_execution_results" elastic_mapping:"action_execution_results:{type:object}"`
AlertHistories []AlertHistory `json:"alert_history" elastic_mapping:"alert_history:{type:object}"`

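The Alert document now records which cluster it belongs to: cluster_id is mapped as a keyword (exact-match filters and terms aggregations) and cluster_name as text. A minimal sketch of the same tag pattern, assuming the framework's ORM reads the elastic_mapping tag to generate the index mapping; the field below is illustrative, not part of this commit:

// Sketch only: an illustrative field following the tag convention above.
// Assumption: elastic_mapping:"end_time:{type:date}" becomes
// {"end_time": {"type": "date"}} in the generated index mapping, just as
// cluster_id above becomes a keyword field that the terms aggregation in
// getTopTenAlertCluster (further down) can group on.
type exampleDoc struct {
	EndTime *int64 `json:"end_time" elastic_mapping:"end_time:{type:date}"`
}
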
@@ -7,7 +7,6 @@ import (
"errors"
"fmt"
httprouter "infini.sh/framework/core/api/router"
"infini.sh/framework/core/elastic"
"infini.sh/framework/core/orm"
"infini.sh/search-center/model/alerting"
"io"
@@ -37,11 +36,6 @@ func getAlertIndexName(typ string) string {

func GetAlerts (w http.ResponseWriter, req *http.Request, ps httprouter.Params){
id := ps.ByName("id")
conf := elastic.GetConfig(id)
if conf == nil {
writeError(w, errors.New("cluster not found"))
return
}

var (
from = getQueryParam(req, "from", "0")
@@ -54,6 +48,7 @@ func GetAlerts (w http.ResponseWriter, req *http.Request, ps httprouter.Params){
monitorIds = req.URL.Query()["monitorIds"]
params = map[string]string{
}
alertType = getQueryParam(req, "type", INDEX_ALL_ALERTS)
)

switch sortField {
@@ -79,11 +74,13 @@ func GetAlerts (w http.ResponseWriter, req *http.Request, ps httprouter.Params){
params["sortString"]: params["sortOrder"],
}
must := []IfaceMap{
{
}
if id != "_all" {
must = append(must, IfaceMap{
"match": IfaceMap{
"cluster_id": id,
},
},
})
}
if severityLevel != "ALL" {
must = append(must, IfaceMap{
@@ -130,7 +127,7 @@ func GetAlerts (w http.ResponseWriter, req *http.Request, ps httprouter.Params){
},
"sort": sort,
}
indexName := getAlertIndexName(INDEX_ALL_ALERTS)
indexName := getAlertIndexName(alertType)

config := getDefaultConfig()
reqUrl := fmt.Sprintf("%s/%s/_search", config.Endpoint, indexName )

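GetAlerts now accepts `_all` as the :id segment (the cluster_id match clause is only added for a concrete cluster id) and a `type` query parameter that selects the target index via getAlertIndexName. A minimal sketch of how a client exercises the endpoint, mirroring the browser request made by the use_alert_data hook further down; the base URL and the helper name are assumptions for illustration:

// Sketch only: not part of the commit. Mirrors the overview page's request
// fetch('/elasticsearch/_all/alerting/alerts?from=0&size=10&type=ALERT').
package main

import (
	"fmt"
	"net/http"
)

func fetchAlerts(baseURL string) (*http.Response, error) {
	// type=ALERT vs type=ALERT_HISTORY selects open alerts or alert history,
	// as used by useAlertData / useAlertHsitoryData in the frontend hooks below.
	url := fmt.Sprintf("%s/elasticsearch/_all/alerting/alerts?from=0&size=10&type=ALERT", baseURL)
	return http.Get(url)
}
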
@@ -18,8 +18,8 @@ type SearchBody struct {

func Search(w http.ResponseWriter, req *http.Request, ps httprouter.Params){
id := ps.ByName("id")
conf := elastic.GetConfig(id)
if conf == nil {
meta := elastic.GetMetadata(id)
if meta == nil {
writeError(w, errors.New("cluster not found"))
return
}
@@ -55,8 +55,8 @@ func Search(w http.ResponseWriter, req *http.Request, ps httprouter.Params){

func GetIndices(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
id := ps.ByName("id")
conf := elastic.GetConfig(id)
if conf == nil {
meta := elastic.GetMetadata(id)
if meta == nil {
writeError(w, errors.New("cluster not found"))
return
}
@@ -69,7 +69,7 @@ func GetIndices(w http.ResponseWriter, req *http.Request, ps httprouter.Params)
writeError(w, err)
return
}
reqUrl := fmt.Sprintf("%s/_cat/indices/%s", conf.Endpoint, body.Index)
reqUrl := fmt.Sprintf("%s/_cat/indices/%s", meta.GetActiveEndpoint(), body.Index)
params := map[string]string{
"format": "json",
"h": "health,index,status",
@@ -100,8 +100,8 @@ func GetAliases(w http.ResponseWriter, req *http.Request, ps httprouter.Params)
}
}()
id := ps.ByName("id")
conf := elastic.GetConfig(id)
if conf == nil {
meta := elastic.GetMetadata(id)
if meta == nil {
writeError(w, errors.New("cluster not found"))
return
}
@@ -114,7 +114,7 @@ func GetAliases(w http.ResponseWriter, req *http.Request, ps httprouter.Params)
writeError(w, err)
return
}
reqUrl := fmt.Sprintf("%s/_cat/aliases/%s", conf.Endpoint, body.Alias)
reqUrl := fmt.Sprintf("%s/_cat/aliases/%s", meta.GetActiveEndpoint(), body.Alias)
params := map[string]string{
"format": "json",
"h": "alias,index",
@@ -140,8 +140,8 @@ func GetAliases(w http.ResponseWriter, req *http.Request, ps httprouter.Params)

func GetMappings(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
id := ps.ByName("id")
conf := elastic.GetConfig(id)
if conf == nil {
meta := elastic.GetMetadata(id)
if meta == nil {
writeError(w, errors.New("cluster not found"))
return
}
@@ -154,7 +154,7 @@ func GetMappings(w http.ResponseWriter, req *http.Request, ps httprouter.Params)
writeError(w, err)
return
}
reqUrl := fmt.Sprintf("%s/%s/_mapping", conf.Endpoint, strings.Join(body.Index, ","))
reqUrl := fmt.Sprintf("%s/%s/_mapping", meta.GetActiveEndpoint(), strings.Join(body.Index, ","))
res, err := doRequest(reqUrl, http.MethodGet, nil, nil)
if err != nil {
writeError(w, err)
@@ -174,48 +174,16 @@ func GetMappings(w http.ResponseWriter, req *http.Request, ps httprouter.Params)
}, http.StatusOK)
}


func GetPlugins(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
id := ps.ByName("id")
conf := elastic.GetConfig(id)
if conf == nil {
writeError(w, errors.New("cluster not found"))
return
}

reqUrl := fmt.Sprintf("%s/_cat/plugins", conf.Endpoint)
res, err := doRequest(reqUrl, http.MethodGet, map[string]string{
"format": "json",
"h": "component",
}, nil)
if err != nil {
writeError(w, err)
return
}
defer res.Body.Close()
var resBody = []IfaceMap{}
err = decodeJSON(res.Body, &resBody)
if err != nil {
writeError(w, err)
return
}

writeJSON(w, IfaceMap{
"ok": true,
"resp": resBody,
}, http.StatusOK)
}

func GetSettings(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
id := ps.ByName("id")
conf := elastic.GetConfig(id)
if conf == nil {
meta := elastic.GetMetadata(id)
if meta == nil {
writeError(w, errors.New("cluster not found"))
return
}

// /_cluster/settings?include_defaults=true
reqUrl := fmt.Sprintf("%s/_cluster/settings", conf.Endpoint)
reqUrl := fmt.Sprintf("%s/_cluster/settings", meta.GetActiveEndpoint())
res, err := doRequest(reqUrl, http.MethodGet, map[string]string{
"include_defaults": "true",
}, nil)

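The recurring change in this file (and in ExecuteMonitor and getQueryResult below) swaps the statically configured conf.Endpoint for the cluster metadata's active endpoint, presumably so requests go to a currently reachable node. A minimal sketch of the shared lookup, using only the framework calls that appear in this diff; the wrapper function itself is illustrative, not part of the commit:

// Sketch only: the lookup pattern the updated handlers now share.
func activeEndpoint(clusterID string) (string, error) {
	meta := elastic.GetMetadata(clusterID)
	if meta == nil {
		return "", errors.New("cluster not found")
	}
	return meta.GetActiveEndpoint(), nil
}
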
@@ -627,8 +627,8 @@ func AcknowledgeAlerts(w http.ResponseWriter, req *http.Request, ps httprouter.P

func ExecuteMonitor(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
id := ps.ByName("id")
conf := elastic.GetConfig(id)
if conf == nil {
meta := elastic.GetMetadata(id)
if meta == nil {
writeError(w, errors.New("cluster not found"))
return
}
@@ -651,7 +651,7 @@ func ExecuteMonitor(w http.ResponseWriter, req *http.Request, ps httprouter.Para
return
}
periodStart := time.Now()
reqUrl := fmt.Sprintf("%s/%s/_search", conf.Endpoint, strings.Join(monitor.Inputs[0].Search.Indices, ","))
reqUrl := fmt.Sprintf("%s/%s/_search", meta.GetActiveEndpoint(), strings.Join(monitor.Inputs[0].Search.Indices, ","))
res, err := doRequest(reqUrl, http.MethodGet, nil, monitor.Inputs[0].Search.Query)
if err != nil {
writeError(w, err)

@@ -12,17 +12,132 @@ func GetAlertOverview(w http.ResponseWriter, req *http.Request, ps httprouter.Pa
writeError(w, err)
return
}
topTenData, err := getTopTenAlertCluster()
if err != nil {
writeError(w, err)
return
}
stateCount, err := getAlertByState()
if err != nil {
writeError(w, err)
return
}
writeJSON(w, IfaceMap{
"metrics": IfaceMap{
"alert_day": alertDayMetricData,
"last_tree_month": IfaceMap{
"data": alertDayMetricData,
"day": 90,
},
"top_ten_cluster": IfaceMap{
"data": topTenData,
},
},
"state_count": stateCount,
"ok": true,
}, http.StatusOK)
}

func getLastAlertDayCount() (interface{}, error){
func getAlertByState() (IfaceMap, error){
reqBody := IfaceMap{
"size": 0,
"aggs": IfaceMap{
"alert_count_by_state": IfaceMap{
"terms": IfaceMap{
"field": "state",
"size": 10,
},
},
},
}
buckets, err := queryMetricBuckets(reqBody, "alert_count_by_state", INDEX_ALERT)
if err != nil {
return nil, err
}
var metricData = IfaceMap{}
if bks, ok := buckets.([]interface{}); ok {
for _, bk := range bks {
if bkm, ok := bk.(map[string]interface{}); ok {
metricData[queryValue(bkm, "key", "").(string)]= queryValue(bkm, "doc_count", 0)
}
}
}
return metricData, nil
}

func queryMetricBuckets(reqBody IfaceMap, metricKey, indexName string)(interface{}, error){
conf := getDefaultConfig()
reqUrl := fmt.Sprintf("%s/%s/_search", conf.Endpoint, getAlertIndexName(INDEX_ALL_ALERTS))
reqUrl := fmt.Sprintf("%s/%s/_search", conf.Endpoint, getAlertIndexName(indexName))
res, err := doRequest(reqUrl, http.MethodGet, nil, reqBody)
if err != nil {
return nil, err
}
result := IfaceMap{}
defer res.Body.Close()
err = decodeJSON(res.Body, &result)
if err != nil {
return nil, err
}
buckets := queryValue(result, fmt.Sprintf("aggregations.%s.buckets", metricKey), []interface{}{})
return buckets, nil
}

func getTopTenAlertCluster()(interface{}, error){
reqBody := IfaceMap{
"size": 0,
"aggs": IfaceMap{
"alert_top_ten": IfaceMap{
"terms": IfaceMap{
"field": "cluster_id",
"size": 10,
},
"aggs": IfaceMap{
"group_by_state": IfaceMap{
"terms": IfaceMap{
"field": "state",
"size": 5,
},
},
},
},
},
}
buckets, err := queryMetricBuckets(reqBody, "alert_top_ten", INDEX_ALL_ALERTS)
if err != nil {
return nil, err
}
var metricData []IfaceMap
var clusterIDs []interface{}
if bks, ok := buckets.([]interface{}); ok {
for _, bk := range bks {
if bkm, ok := bk.(map[string]interface{}); ok {
stateBuckets := queryValue(bkm, "group_by_state.buckets", nil )
key := queryValue(bkm, "key", "" )
clusterIDs = append(clusterIDs, key)
if stateBKS, ok := stateBuckets.([]interface{}); ok{
for _, stateBK := range stateBKS {
if stateBKMap, ok := stateBK.(map[string]interface{}); ok {
metricData = append(metricData, IfaceMap{
"x": key,
"y": queryValue(stateBKMap, "doc_count", 0),
"g": queryValue(stateBKMap, "key", ""),
})
}
}
}
}
}
}
//reqBody = IfaceMap{
// "query": IfaceMap{
// "terms": IfaceMap{
// "_id": clusterIDs,
// },
// },
//}
return metricData, nil
}

func getLastAlertDayCount() (interface{}, error){
reqBody := IfaceMap{
"size": 0,
"query": IfaceMap{
@@ -45,18 +160,10 @@ func getLastAlertDayCount() (interface{}, error){
},
},
}

res, err := doRequest(reqUrl, http.MethodGet, nil, reqBody)
buckets, err := queryMetricBuckets(reqBody, "alert_day_count", INDEX_ALL_ALERTS)
if err != nil {
return nil, err
}
result := IfaceMap{}
defer res.Body.Close()
err = decodeJSON(res.Body, &result)
if err != nil {
return nil, err
}
buckets := queryValue(result, "aggregations.alert_day_count.buckets", []interface{}{})
var metricData []interface{}
if bks, ok := buckets.([]interface{}); ok {
for _, bk := range bks {

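For orientation, the response GetAlertOverview assembles above has the following shape; each {x, y, g} point produced by getTopTenAlertCluster is cluster id, document count and alert state, matching xAccessor="x", yAccessors={['y']} and splitSeriesAccessors={['g']} on the overview chart further down, and state_count is a state-to-count map consumed by the state cards. The literal below is a sketch with made-up values, not output from a real cluster:

// Sketch only: illustrative values showing the shapes produced above.
var overviewExample = IfaceMap{
	"ok": true,
	"metrics": IfaceMap{
		"alert_day":       []interface{}{ /* [timestamp, count] pairs from getLastAlertDayCount */ },
		"last_tree_month": IfaceMap{"data": []interface{}{}, "day": 90},
		"top_ten_cluster": IfaceMap{"data": []IfaceMap{
			{"x": "cluster-a", "y": 7, "g": "ACTIVE"},
			{"x": "cluster-a", "y": 2, "g": "ERROR"},
		}},
	},
	// state -> doc_count, rendered by the overview page's state cards
	"state_count": IfaceMap{"ACTIVE": 3, "ACKNOWLEDGED": 1, "ERROR": 0},
}
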
@@ -173,6 +173,7 @@ func generateMonitorJob(smt *ScheduleMonitor) MonitorJob{
Severity: trigger.Severity,
State: ALERT_COMPLETED,
ClusterID: sm.ClusterID,
ClusterName: elastic.GetMetadata(sm.ClusterID).Config.Name,
}
if !isTrigger {
endTime := time.Now().UnixNano()/1e6
@@ -466,8 +467,8 @@ func resolveEmailGroup(ID string)(*alerting.EmailGroup, error){
}

func getQueryResult(clusterID string, input *alerting.MonitorInput) (IfaceMap, error) {
conf := elastic.GetConfig(clusterID)
reqUrl := fmt.Sprintf("%s/%s/_search", conf.Endpoint, strings.Join(input.Search.Indices, ","))
meta := elastic.GetMetadata(clusterID)
reqUrl := fmt.Sprintf("%s/%s/_search", meta.GetActiveEndpoint(), strings.Join(input.Search.Indices, ","))
res, err := doRequest(reqUrl, http.MethodGet, nil, input.Search.Query)
if err != nil {
return nil, err
@@ -535,7 +536,7 @@ func resolveTriggerResult(trigger *alerting.Trigger, monitorCtx []byte ) (bool,
}

func getEnabledMonitors() (map[string]ScheduleMonitor, error){
config := elastic.GetConfig("default")
config := getDefaultConfig()
reqUrl := fmt.Sprintf("%s/%s/_search", config.Endpoint, orm.GetIndexName(alerting.Config{}))
must := []IfaceMap{
{

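Alerts generated by the scheduler now carry ClusterID and ClusterName, which is what lets the overview aggregate and label alerts per cluster. A nil-guarded variant of the name lookup, shown only as a sketch (the commit itself calls elastic.GetMetadata(sm.ClusterID).Config.Name directly):

// Sketch only: defensive form of the lookup used above.
clusterName := ""
if meta := elastic.GetMetadata(sm.ClusterID); meta != nil {
	clusterName = meta.Config.Name
}
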
@@ -96,7 +96,7 @@ const ConsoleInputUI = ({clusterID, initialText}:ConsoleInputProps) => {
const legacyCoreEditor = new LegacyCoreEditor(aceEditor, editorActionsRef.current as HTMLElement);
aceEditor.commands.addCommand({
name: 'exec_request',
bindKey: 'ctrl+enter',
bindKey: {win: "Ctrl-enter", mac: "Command-enter|Ctrl-enter"},
exec: ()=>{
sendCurrentRequestToESRef.current();
}
@@ -150,7 +150,7 @@ const ConsoleInputUI = ({clusterID, initialText}:ConsoleInputProps) => {
const formattedRequest = requests.map(request => ({
method: request.method,
path: request.url,
body: (request.data || [])[0],
body: (request.data || []).join('\n'),
}));
return formattedRequest;
};

@@ -50,7 +50,6 @@ export const useSendCurrentRequestToES = () => {
const { services: { history }, clusterID } = useServicesContext();

return useCallback(async () => {
console.log(clusterID)
try {
const editor = registry.getInputEditor();
const requests = await editor.getRequestsInRange();

@@ -834,7 +834,6 @@ export default function ({
if (ret.method !== 'LOAD') {
components = getTopLevelUrlCompleteComponents(context.method);
}else{
debugger
components = getCommandComponents();
}
populateContext(ret.urlTokenPath, context, editor, true, components);

@@ -129,7 +129,7 @@ export default {
}
});
},
*rewriteURL({payload}, {select}){
*rewriteURL({payload}, {select, put}){
const {pathname, history, search} = payload;
const global = yield select(state=>state.global);
if(pathname && global.selectedClusterID){
@@ -137,8 +137,16 @@ export default {
if(!pathname.includes('elasticsearch')){
history.replace(pathname+newPart+ (search || ''))
}else{
const newPath = pathname.replace(/\/elasticsearch\/(\w+)\/?/, newPart);
history.replace(newPath+(search || ''))
const ms = pathname.match(/\/elasticsearch\/(\w+)\/?/);
if(ms && ms.length>1 && ms[1] != global.selectedClusterID){
console.log(ms[1])
yield put({
type: 'changeClusterById',
payload:{
id: ms[1]
}
});
}
}
}
},
@@ -243,8 +251,7 @@ export default {
if(pathname.startsWith("/system")){
clusterVisible = false;
}else{
if(!pathname.startsWith("/exception") && pathname != '/alerting'){
if(!pathname.includes('elasticsearch')){
if(!pathname.startsWith("/exception")){
dispatch({
type: 'rewriteURL',
payload: {
@@ -253,7 +260,6 @@ export default {
search,
}
})
}
}
}
dispatch({

@@ -14,6 +14,7 @@ export interface AlertRecord {
acknowledged_time: TimeValue,
action_execution_results?: ActionExecutionResult[];
cluster_id: string;
cluster_name: string;
end_time: TimeValue;
error_message: string;
id: string;
@@ -36,6 +37,7 @@ export const AlertItem = ({
item,
onClick
}: AlertItemProps)=>{
const clusterName = item.cluster_name ? item.cluster_name + ': ' : '';
return (
<List.Item
onClick={()=>{onClick(item)}}
@@ -46,7 +48,7 @@ export const AlertItem = ({
<div className={"status" + ` ${item.state}`}>
<div>{item.severity}</div>
</div>
<div className="content">{item.monitor_name+":"+item.trigger_name}</div>
<div className="content">{clusterName +item.monitor_name+": "+item.trigger_name}</div>
<div className="right">
<div className="time">{moment(item.start_time).fromNow()}</div>
<div className="arrow">

@@ -1,4 +1,6 @@
import React, {useEffect, useState} from "react";
import {Spin, Card} from 'antd';
import './overview.scss';
import {
Axis,
Chart,
@@ -9,8 +11,12 @@ import {
ScaleType,
Settings,
timeFormatter,
BarSeries,
} from "@elastic/charts";

import {useAlertData, useAlertHsitoryData} from './hooks/use_alert_data';
import {AlertList} from '../Dashboard/components/AlertList/AlertList';

const theme = {
legend: {
margin: 0,
@@ -65,42 +71,115 @@ const theme = {

export default (props)=>{
const {httpClient, history} = props;
const [isLoading, setIsLoading] = useState(true);
const [data, setData] = useState({
metrics: {
alert_day: [],
}
last_tree_month: {},
top_ten_cluster:{},
},
});
useEffect(()=>{
httpClient.get('/alerting/overview', {}).then((resp) => {
if (resp.ok) {
const { metrics } = resp;
const { metrics, state_count } = resp;
setData({
metrics
metrics,
state_count
});
} else {
console.log('error getting alerts:', resp);
}
setIsLoading(false)
});
}, [])
const pageSize = 10
const [alerts, onAlertPageChange] = useAlertData(pageSize);
const [historyAlerts, onAlertHistoryPageChange] = useAlertHsitoryData(pageSize);

const onItemClick = (item)=>{
history.push(`/monitors/${item.monitor_id}/elasticsearch/${item.cluster_id}`)
}


return (
<div style={{height:'150px'}}>
<Chart>
<Settings theme={theme} />
<Axis id="bottom" position={Position.Bottom} showOverlappingTicks tickFormat={timeFormatter(niceTimeFormatByDay(90))} />
<Axis
id="left"
title={'最近三个月告警统计'}
position={Position.Left}
/>
<LineSeries
id="lines"
xScaleType={ScaleType.Linear}
yScaleType={ScaleType.Linear}
xAccessor={0}
yAccessors={[1]}
data={data.metrics.alert_day}
/>
</Chart>
<div className="overview-wrapper">
<Spin spinning={isLoading}>
<div className="layout">
<div className="left">
<div className="state-count">
<Card className="item" title="激活告警">
{data.state_count?.ACTIVE || 0}
</Card>
<Card className="item" title="已响应告警" >
{data.state_count?.ACKNOWLEDGED || 0}
</Card>
<Card className="item" title="错误告警">
{data.state_count?.ERROR || 0}
</Card>
</div>
<div>
<AlertList dataSource={alerts.data}
title="Open Alerts"
onItemClick={onItemClick}
pagination={{
pageSize: 10,
total: alerts.total,
onChange: onAlertPageChange,
}}/>
</div>
<div>
<AlertList dataSource={historyAlerts.data}
title="History Alerts"
onItemClick={onItemClick}
pagination={{
pageSize: 10,
total: historyAlerts.total,
onChange: onAlertHistoryPageChange,
}}/>
</div>
</div>

<div className="right">
<div style={{height:'150px'}}>
<Chart>
<Settings theme={theme} />
<Axis id="bottom" position={Position.Bottom} title="Last 3 months" showOverlappingTicks tickFormat={timeFormatter(niceTimeFormatByDay(data.metrics.last_tree_month.day))} />
<Axis
id="left"
title="Alert number"
position={Position.Left}
/>
<LineSeries
id="lines"
xScaleType={ScaleType.Time}
yScaleType={ScaleType.Linear}
xAccessor={0}
yAccessors={[1]}
data={data.metrics.last_tree_month?.data || []}
/>
</Chart>
</div>
<div style={{height:'150px', marginTop: 10}}>
<Chart>
<Settings showLegend showLegendExtra legendPosition={Position.Right} theme={theme} />
<Axis id="bottom" position={Position.Bottom} title="Top 10 cluster" showOverlappingTicks />
<Axis id="left2" title="Alert number" position={Position.Left} tickFormat={(d) => Number(d).toFixed(0)} />

<BarSeries
id="bars"
xScaleType={ScaleType.Linear}
yScaleType={ScaleType.Linear}
xAccessor="x"
yAccessors={['y']}
stackAccessors={['x']}
splitSeriesAccessors={['g']}
data={data.metrics.top_ten_cluster?.data || []}
/>
</Chart>
</div>
</div>
</div>
</Spin>
</div>
);
}

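Note on the UI strings above: the three state cards are titled 激活告警 (active alerts), 已响应告警 (acknowledged alerts) and 错误告警 (error alerts), and are fed by the state_count map returned by GetAlertOverview; the axis title 最近三个月告警统计 in the removed chart block means "alert statistics for the last three months", which the replacement chart on the right labels in English as "Last 3 months".
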
@@ -0,0 +1,82 @@
import {useState, useEffect} from 'react';
import _ from 'lodash';

const getAlerts =
async (from, size,type) => {
let params = {
from,
size,
type
};
let qstr = '';
for(let key in params){
qstr += `&${key}=${params[key]}`;
}
if(qstr){
qstr = `?${qstr.slice(1)}`
}
const resp = await fetch('/elasticsearch/_all/alerting/alerts'+qstr);
return resp.json();
// if (resp.ok) {
// const { alerts, totalAlerts } = resp;

}

export const useAlertData = (pageSize, page)=>{
const [size, setSize] = useState(pageSize || 10);
const [pageIndex, setPageIndex] = useState(page || 1);
const [alertData, setAlertData] = useState({
data: [],
total: 0,
});
useEffect(()=>{
const from = (pageIndex - 1) * size;
const fetchAlerts = async (from, size)=>{
const resp = await getAlerts(from, size, 'ALERT');
if(resp.ok){
const { alerts, totalAlerts } = resp;
setAlertData({
...alertData,
data: alerts,
total: totalAlerts,
})
}
}
fetchAlerts(from,size);
}, [pageIndex, size]);
const changePage = (pageIndex) => {
setPageIndex(pageIndex);
}

return [alertData, changePage];
}

export const useAlertHsitoryData = (pageSize, page)=>{
const [size, setSize] = useState(pageSize || 10);
const [pageIndex, setPageIndex] = useState(page || 1);
const [alertHisotryData, setAlertHisotryData] = useState({
data: [],
total: 0,
});
useEffect(()=>{
const from = (pageIndex - 1) * size;
const fetchHistoryAlerts = async (from, size)=>{
const resp = await getAlerts(from, size, 'ALERT_HISTORY');
if(resp.ok){
const { alerts, totalAlerts } = resp;
setAlertHisotryData({
...alertHisotryData,
data: alerts,
total: totalAlerts,
})
}
}
fetchHistoryAlerts(from, size);
}, [pageIndex, size])

const changePage = (pageIndex) => {
setPageIndex(pageIndex);
}

return [alertHisotryData, changePage];
}

@@ -0,0 +1,24 @@
.layout{
display: flex;
.left{
display: flex;
flex: 1 1 60%;
flex-direction: column;
.state-count{
display: flex;
text-align: center;
justify-content: space-between;
.item{
min-width: 30%;
}
margin-bottom: 10px;
}
}
.right{
flex: 1 1 40%;
}
}

.overview-wrapper {
padding: 10px;
}