Commit 5a65dc5d by Torkel Ödegaard

Merge branch 'alerting' of github.com:grafana/grafana into alerting

parents 9b4c0cca 6bf42dde
......@@ -8,104 +8,21 @@ page_keywords: alerting, grafana, plugins, documentation
> Alerting is still in very early development. Please be aware of this.
The roadmap for alerting is described in [issue #2209](https://github.com/grafana/grafana/issues/2209#issuecomment-210077445) and the current state can be found on this page.
The roadmap for alerting in Grafana has been changing rapidly during the last 2-3 months, so make sure you follow the discussion in the [alerting issue](https://github.com/grafana/grafana/issues/2209).
## Introduction
So far Grafana only supports saving alerting rules, not executing them. This means that you have to export them from Grafana using the API and import them into your monitoring tool of choice. The current definition of an alert rule looks like this:
> Alerting is turned off by default and has to be enabled in the config file.
``` go
type AlertRule struct {
Id int64 `json:"id"`
OrgId int64 `json:"-"`
DashboardId int64 `json:"dashboardId"`
PanelId int64 `json:"panelId"`
Query string `json:"query"`
QueryRefId string `json:"queryRefId"`
WarnLevel int64 `json:"warnLevel"`
CritLevel int64 `json:"critLevel"`
WarnOperator string `json:"warnOperator"`
CritOperator string `json:"critOperator"`
Interval string `json:"interval"`
Title string `json:"title"`
Description string `json:"description"`
QueryRange string `json:"queryRange"`
Aggregator string `json:"aggregator"`
State string `json:"state"`
}
```
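Since Grafana does not execute the rules yet, exporting them means calling the HTTP API. Below is a minimal sketch in Go using only the standard library; the host, the credentials and the subset of fields decoded are assumptions, and the endpoint is the one documented in the API section below:
``` go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

// alertRule decodes a subset of the JSON fields of the AlertRule struct above.
type alertRule struct {
	Id       int64  `json:"id"`
	Title    string `json:"title"`
	State    string `json:"state"`
	Interval string `json:"interval"`
}

func main() {
	// Assumed: a local Grafana instance with basic auth admin/admin.
	req, err := http.NewRequest("GET", "http://localhost:3000/api/alerts/rules", nil)
	if err != nil {
		log.Fatal(err)
	}
	req.SetBasicAuth("admin", "admin")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var rules []alertRule
	if err := json.NewDecoder(resp.Body).Decode(&rules); err != nil {
		log.Fatal(err)
	}
	for _, r := range rules {
		fmt.Printf("rule %d: %s (%s, every %s)\n", r.Id, r.Title, r.State, r.Interval)
	}
}
```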
Grafana lets you define alert rules based on metrics queries on dashboards. Every alert is connected to a panel, and whenever the query for the panel is updated the alerting rule is updated as well.
So far only the graph panel supports alerting. To enable alerting for a panel, go to the alerting tab and press the 'Create alert' button.
Most of these properties might require some extra explanation.
## Alert status page
Query: JSON representation of the query used by Grafana. Differs depending on the datasource.
QueryRange: How far back in time the query should look.
Aggregator: How the result should be reduced into a single value, e.g. avg, sum, min, max.
State: Current state of the alert: OK, WARN, CRITICAL, ACKNOWLEDGED.
You can get an overview of all your current alerts on the alert status page at /alerting.
You can configure these settings in the Alerting tab on graph panels in edit mode. When the dashboard is saved, the alert is created or updated based on the dashboard. If you wish to delete an alert, simply set the query to '- select query -' in the alerting tab and save the dashboard.
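For reference, the alert settings that the dashboard extractor reads from a panel look roughly like this; the field names follow the test fixtures in this change, and the values are only illustrative:
``` json
{
  "name": "CPU alert",
  "description": "desc",
  "enabled": true,
  "crit": { "value": 20, "op": ">" },
  "warn": { "value": 10, "op": ">" },
  "query": { "from": "5m", "to": "now", "datasourceId": 1 },
  "transform": { "type": "avg", "name": "aggregation" }
}
```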
## Alert notifications
When an alert is triggered it goes to the notification handler, which takes care of sending emails or pushing data as webhooks.
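The webhook body is a JSON document whose keys (name, state, triggered) are the ones set by the WebhookNotifier later in this change. A minimal sketch of a receiver follows; the path and port are assumptions:
``` go
package main

import (
	"encoding/json"
	"log"
	"net/http"
)

// alertPayload mirrors the keys the webhook notifier sets on the request body.
type alertPayload struct {
	Name      string      `json:"name"`
	State     string      `json:"state"`
	Triggered interface{} `json:"triggered"`
}

func main() {
	http.HandleFunc("/grafana-alert", func(w http.ResponseWriter, r *http.Request) {
		var p alertPayload
		if err := json.NewDecoder(r.Body).Decode(&p); err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		log.Printf("alert %q changed state to %s", p.Name, p.State)
		w.WriteHeader(http.StatusOK)
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```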
Alert notifications can be configured at /alerting/notifications.
## API
### Alert rules
``` url
GET /api/alerts/rules
```
``` http
state //array of strings *optional*
dashboardId //int *optional*
panelId //int *optional*
Result
[]AlertRule
```
``` http
GET /api/alerts/rules/:alertId
Result AlertRule
```
### Alert state
``` http
GET /api/alerts/rules/:alertId/states
Result
[
{
alertId: int,
newState: OK, WARN, CRITICAL, ACKNOWLEDGED,
created: timestamp,
info: description of what might have caused the changed alert state
}
]
```
``` http
PUT /api/alerts/rules/:alertId/state
Request
{
alertId: alertid,
newState: OK, WARN, CRITICAL, ACKNOWLEDGED,
info: description of what might have caused the changed alert state
}
```
### Alert rule changes
``` http
GET /api/alerts/changes
limit //int *optional*
sinceId //int *optional*
Result
[
{
id: incrementing id,
alertId: alertId,
type: CREATED/UPDATED/DELETED,
created: timestamp,
}
]
```
<!-- This email is sent when an alert changes state -->
[[Subject .Subject "Grafana Alert: [ [[.State]] ] [[.Name]]" ]]
Alertstate: [[.State]]<br />
[[.AlertPageUrl]]<br />
[[.DashboardLink]]<br />
[[.Description]]<br />
[[if eq .State "Ok"]]
Everything is Ok
[[end]]
<img src="[[.DashboardImage]]" />
[[if ne .State "Ok" ]]
<table class="row">
<tr>
<td class="expander">Serie</td>
<td class="expander">State</td>
<td class="expander">Actual value</td>
</tr>
[[ range $ta := .TriggeredAlerts]]
<tr>
<td class="expander">[[$ta.Name]]</td>
<td class="expander">[[$ta.State]]</td>
<td class="expander">[[$ta.ActualValue]]</td>
</tr>
[[end]]
</table>
[[end]]
......@@ -82,7 +82,7 @@ func GetAlerts(c *middleware.Context) Response {
//TODO: should be possible to speed this up with lookup table
for _, alert := range alertDTOs {
for _, dash := range *dashboardsQuery.Result {
for _, dash := range dashboardsQuery.Result {
if alert.DashboardId == dash.Id {
alert.DashboardUri = "db/" + dash.Slug
}
......@@ -140,6 +140,7 @@ func GetAlertStates(c *middleware.Context) Response {
// PUT /api/alerts/events/:id
func PutAlertState(c *middleware.Context, cmd models.UpdateAlertStateCommand) Response {
cmd.AlertId = c.ParamsInt64(":alertId")
cmd.OrgId = c.OrgId
query := models.GetAlertByIdQuery{Id: cmd.AlertId}
if err := bus.Dispatch(&query); err != nil {
......@@ -156,3 +157,73 @@ func PutAlertState(c *middleware.Context, cmd models.UpdateAlertStateCommand) Re
return Json(200, cmd.Result)
}
func GetAlertNotifications(c *middleware.Context) Response {
query := &models.GetAlertNotificationQuery{
OrgID: c.OrgId,
}
if err := bus.Dispatch(query); err != nil {
return ApiError(500, "Failed to get alert notifications", err)
}
var result []dtos.AlertNotificationDTO
for _, notification := range query.Result {
result = append(result, dtos.AlertNotificationDTO{
Id: notification.Id,
Name: notification.Name,
Type: notification.Type,
Created: notification.Created,
Updated: notification.Updated,
})
}
return Json(200, result)
}
func GetAlertNotificationById(c *middleware.Context) Response {
query := &models.GetAlertNotificationQuery{
OrgID: c.OrgId,
Id: c.ParamsInt64("notificationId"),
}
if err := bus.Dispatch(query); err != nil {
return ApiError(500, "Failed to get alert notification", err)
}
if len(query.Result) == 0 {
return ApiError(404, "Alert notification not found", nil)
}
return Json(200, query.Result[0])
}
func CreateAlertNotification(c *middleware.Context, cmd models.CreateAlertNotificationCommand) Response {
cmd.OrgID = c.OrgId
if err := bus.Dispatch(&cmd); err != nil {
return ApiError(500, "Failed to create alert notification", err)
}
return Json(200, cmd.Result)
}
func UpdateAlertNotification(c *middleware.Context, cmd models.UpdateAlertNotificationCommand) Response {
cmd.OrgID = c.OrgId
if err := bus.Dispatch(&cmd); err != nil {
return ApiError(500, "Failed to update alert notification", err)
}
return Json(200, cmd.Result)
}
func DeleteAlertNotification(c *middleware.Context) Response {
cmd := models.DeleteAlertNotificationCommand{
OrgId: c.OrgId,
Id: c.ParamsInt64("notificationId"),
}
if err := bus.Dispatch(&cmd); err != nil {
return ApiError(500, "Failed to delete alert notification", err)
}
return Json(200, map[string]interface{}{"notificationId": cmd.Id})
}
......@@ -62,6 +62,7 @@ func Register(r *macaron.Macaron) {
r.Get("/playlists/", reqSignedIn, Index)
r.Get("/playlists/*", reqSignedIn, Index)
r.Get("/alerting/", reqSignedIn, Index)
r.Get("/alerting/*", reqSignedIn, Index)
// sign up
r.Get("/signup", Index)
......@@ -247,13 +248,22 @@ func Register(r *macaron.Macaron) {
r.Group("/alerts", func() {
r.Group("/rules", func() {
r.Get("/:alertId/states", wrap(GetAlertStates))
r.Put("/:alertId/state", bind(m.UpdateAlertStateCommand{}), wrap(PutAlertState))
//r.Put("/:alertId/state", bind(m.UpdateAlertStateCommand{}), wrap(PutAlertState))
r.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert))
//r.Delete("/:alertId", ValidateOrgAlert, wrap(DelAlert)) disabled until we know how to handle dashboard updates
r.Get("/", wrap(GetAlerts))
})
r.Get("/changes", wrap(GetAlertChanges))
r.Get("/notifications", wrap(GetAlertNotifications))
r.Group("/notification", func() {
r.Post("/", bind(m.CreateAlertNotificationCommand{}), wrap(CreateAlertNotification))
r.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification))
r.Get("/:notificationId", wrap(GetAlertNotificationById))
r.Delete("/:notificationId", wrap(DeleteAlertNotification))
}, reqOrgAdmin)
//r.Get("/changes", wrap(GetAlertChanges))
})
// error test
......
package dtos
import "time"
type AlertRuleDTO struct {
Id int64 `json:"id"`
DashboardId int64 `json:"dashboardId"`
......@@ -19,3 +21,11 @@ type AlertRuleDTO struct {
DashboardUri string `json:"dashboardUri"`
}
type AlertNotificationDTO struct {
Id int64 `json:"id"`
Name string `json:"name"`
Type string `json:"type"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
......@@ -80,10 +80,16 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
})
if setting.AlertingEnabled && (c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR) {
alertChildNavs := []*dtos.NavLink{
{Text: "Home", Url: setting.AppSubUrl + "/alerting"},
{Text: "Notifications", Url: setting.AppSubUrl + "/alerting/notifications"},
}
data.MainNavLinks = append(data.MainNavLinks, &dtos.NavLink{
Text: "Alerting",
Icon: "icon-gf icon-gf-monitoring",
Url: setting.AppSubUrl + "/alerting",
Children: alertChildNavs,
})
}
......
......@@ -18,7 +18,7 @@ func populateDashboardsById(dashboardByIds []int64) ([]m.PlaylistDashboardDto, e
return result, err
}
for _, item := range *dashboardQuery.Result {
for _, item := range dashboardQuery.Result {
result = append(result, m.PlaylistDashboardDto{
Id: item.Id,
Slug: item.Slug,
......
......@@ -28,6 +28,10 @@ func (alert *Alert) ValidToSave() bool {
return alert.DashboardId != 0 && alert.OrgId != 0 && alert.PanelId != 0
}
func (alert *Alert) ShouldUpdateState(newState string) bool {
return alert.State != newState
}
func (this *Alert) ContainsUpdates(other *Alert) bool {
result := false
result = result || this.Name != other.Name
......
package models
import (
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
)
type AlertNotification struct {
Id int64 `json:"id"`
OrgId int64 `json:"-"`
Name string `json:"name"`
Type string `json:"type"`
AlwaysExecute bool `json:"alwaysExecute"`
Settings *simplejson.Json `json:"settings"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type CreateAlertNotificationCommand struct {
Name string `json:"name" binding:"Required"`
Type string `json:"type" binding:"Required"`
AlwaysExecute bool `json:"alwaysExecute"`
OrgID int64 `json:"-"`
Settings *simplejson.Json `json:"settings"`
Result *AlertNotification
}
type UpdateAlertNotificationCommand struct {
Id int64 `json:"id" binding:"Required"`
Name string `json:"name" binding:"Required"`
Type string `json:"type" binding:"Required"`
AlwaysExecute bool `json:"alwaysExecute"`
OrgID int64 `json:"-"`
Settings *simplejson.Json `json:"settings" binding:"Required"`
Result *AlertNotification
}
type DeleteAlertNotificationCommand struct {
Id int64
OrgId int64
}
type GetAlertNotificationQuery struct {
Name string
Id int64
Ids []int64
OrgID int64
IncludeAlwaysExecute bool
Result []*AlertNotification
}
......@@ -3,6 +3,7 @@ package models
import (
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/services/alerting/alertstates"
)
......@@ -13,6 +14,7 @@ type AlertState struct {
NewState string `json:"newState"`
Created time.Time `json:"created"`
Info string `json:"info"`
TriggeredAlerts *simplejson.Json `json:"triggeredAlerts"`
}
func (this *UpdateAlertStateCommand) IsValidState() bool {
......@@ -28,8 +30,10 @@ func (this *UpdateAlertStateCommand) IsValidState() bool {
type UpdateAlertStateCommand struct {
AlertId int64 `json:"alertId" binding:"Required"`
OrgId int64 `json:"orgId" binding:"Required"`
NewState string `json:"newState" binding:"Required"`
Info string `json:"info"`
TriggeredAlerts *simplejson.Json `json:"triggeredAlerts"`
Result *Alert
}
......@@ -42,3 +46,10 @@ type GetAlertsStateQuery struct {
Result *[]AlertState
}
type GetLastAlertStateQuery struct {
AlertId int64
OrgId int64
Result *AlertState
}
......@@ -151,7 +151,7 @@ type GetDashboardTagsQuery struct {
type GetDashboardsQuery struct {
DashboardIds []int64
Result *[]Dashboard
Result []*Dashboard
}
type GetDashboardSlugByIdQuery struct {
......
......@@ -12,6 +12,13 @@ type SendEmailCommand struct {
Info string
}
type SendWebhook struct {
Url string
User string
Password string
Body string
}
type SendResetPasswordEmailCommand struct {
User *User
}
......
......@@ -4,6 +4,7 @@ import (
"fmt"
"regexp"
"strconv"
"strings"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/services/alerting/transformers"
......@@ -26,6 +27,8 @@ type AlertRule struct {
Transform string
TransformParams simplejson.Json
Transformer transformers.Transformer
NotificationGroups []int64
}
var (
......@@ -61,7 +64,18 @@ func NewAlertRuleFromDBModel(ruleDef *m.Alert) (*AlertRule, error) {
model.State = ruleDef.State
model.Frequency = ruleDef.Frequency
critical := ruleDef.Settings.Get("critical")
ngs := ruleDef.Settings.Get("notificationGroups").MustString()
var ids []int64
for _, v := range strings.Split(ngs, ",") {
id, err := strconv.Atoi(v)
if err == nil {
ids = append(ids, int64(id))
}
}
model.NotificationGroups = ids
critical := ruleDef.Settings.Get("crit")
model.Critical = Level{
Operator: critical.Get("op").MustString(),
Value: critical.Get("value").MustFloat64(),
......@@ -74,6 +88,10 @@ func NewAlertRuleFromDBModel(ruleDef *m.Alert) (*AlertRule, error) {
}
model.Transform = ruleDef.Settings.Get("transform").Get("type").MustString()
if model.Transform == "" {
return nil, fmt.Errorf("missing transform")
}
model.TransformParams = *ruleDef.Settings.Get("transform")
if model.Transform == "aggregation" {
......@@ -87,7 +105,6 @@ func NewAlertRuleFromDBModel(ruleDef *m.Alert) (*AlertRule, error) {
DatasourceId: query.Get("datasourceId").MustInt64(),
From: query.Get("from").MustString(),
To: query.Get("to").MustString(),
Aggregator: query.Get("agg").MustString(),
}
if model.Query.Query == "" {
......
......@@ -38,7 +38,7 @@ func TestAlertRuleModel(t *testing.T) {
"description": "desc2",
"handler": 0,
"enabled": true,
"critical": {
"crit": {
"value": 20,
"op": ">"
},
......@@ -55,7 +55,7 @@ func TestAlertRuleModel(t *testing.T) {
"datasourceId": 1
},
"transform": {
"method": "avg",
"type": "avg",
"name": "aggregation"
}
}
......@@ -75,11 +75,12 @@ func TestAlertRuleModel(t *testing.T) {
alertRule, err := NewAlertRuleFromDBModel(alert)
So(err, ShouldBeNil)
So(alertRule.Critical.Operator, ShouldEqual, ">")
So(alertRule.Critical.Value, ShouldEqual, 20)
So(alertRule.Warning.Operator, ShouldEqual, ">")
So(alertRule.Warning.Value, ShouldEqual, 10)
So(alertRule.Critical.Operator, ShouldEqual, ">")
So(alertRule.Critical.Value, ShouldEqual, 20)
})
})
}
package datasources
// GetSeries returns timeseries data from the datasource
package datasources
// import (
// "bytes"
// "encoding/json"
// "fmt"
// "io/ioutil"
// "net/http"
// "net/url"
// "strconv"
// "time"
//
// "github.com/grafana/grafana/pkg/components/simplejson"
// "github.com/grafana/grafana/pkg/log"
// m "github.com/grafana/grafana/pkg/models"
// "github.com/grafana/grafana/pkg/util"
// )
//
// type GraphiteClient struct{}
//
// type GraphiteSerie struct {
// Datapoints [][2]float64
// Target string
// }
//
// var DefaultClient = &http.Client{
// Timeout: time.Minute,
// }
//
// type GraphiteResponse []GraphiteSerie
//
// func (client GraphiteClient) GetSeries(rule m.AlertJob, datasource m.DataSource) (m.TimeSeriesSlice, error) {
// v := url.Values{
// "format": []string{"json"},
// "target": []string{getTargetFromRule(rule.Rule)},
// "until": []string{"now"},
// "from": []string{"-" + strconv.Itoa(rule.Rule.QueryRange) + "s"},
// }
//
// log.Trace("Graphite: sending request with querystring: ", v.Encode())
//
// req, err := http.NewRequest("POST", datasource.Url+"/render", nil)
//
// if err != nil {
// return nil, fmt.Errorf("Could not create request")
// }
//
// req.Body = ioutil.NopCloser(bytes.NewReader([]byte(v.Encode())))
//
// if datasource.BasicAuth {
// req.Header.Add("Authorization", util.GetBasicAuthHeader(datasource.User, datasource.Password))
// }
//
// res, err := DefaultClient.Do(req)
//
// if err != nil {
// return nil, err
// }
//
// if res.StatusCode != http.StatusOK {
// return nil, fmt.Errorf("expected httpstatus 200, found %d", res.StatusCode)
// }
//
// response := GraphiteResponse{}
//
// json.NewDecoder(res.Body).Decode(&response)
//
// var timeSeries []*m.TimeSeries
// for _, v := range response {
// timeSeries = append(timeSeries, m.NewTimeSeries(v.Target, v.Datapoints))
// }
//
// return timeSeries, nil
// }
//
// func getTargetFromRule(rule m.AlertRule) string {
// json, _ := simplejson.NewJson([]byte(rule.Query))
//
// return json.Get("target").MustString()
// }
......@@ -5,9 +5,7 @@ import (
"time"
"github.com/benbjohnson/clock"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting/alertstates"
)
......@@ -20,6 +18,7 @@ type Engine struct {
handler AlertingHandler
ruleReader RuleReader
log log.Logger
responseHandler ResultHandler
}
func NewEngine() *Engine {
......@@ -31,6 +30,7 @@ func NewEngine() *Engine {
handler: NewHandler(),
ruleReader: NewRuleReader(),
log: log.New("alerting.engine"),
responseHandler: NewResultHandler(),
}
return e
......@@ -52,7 +52,7 @@ func (e *Engine) Stop() {
func (e *Engine) alertingTicker() {
defer func() {
if err := recover(); err != nil {
e.log.Error("Scheduler Panic, stopping...", "error", err, "stack", log.Stack(1))
e.log.Error("Scheduler Panic: stopping alertingTicker", "error", err, "stack", log.Stack(1))
}
}()
......@@ -73,6 +73,12 @@ func (e *Engine) alertingTicker() {
}
func (e *Engine) execDispatch() {
defer func() {
if err := recover(); err != nil {
e.log.Error("Scheduler Panic: stopping executor", "error", err, "stack", log.Stack(1))
}
}()
for job := range e.execQueue {
log.Trace("Alerting: engine:execDispatch() starting job %s", job.Rule.Name)
job.Running = true
......@@ -93,6 +99,7 @@ func (e *Engine) executeJob(job *AlertJob) {
Duration: float64(time.Since(now).Nanoseconds()) / float64(1000000),
Error: fmt.Errorf("Timeout"),
AlertJob: job,
ExecutionTime: time.Now(),
}
e.log.Debug("Job Execution timeout", "alertRuleId", job.Rule.Id)
case result := <-resultChan:
......@@ -103,6 +110,12 @@ func (e *Engine) executeJob(job *AlertJob) {
}
func (e *Engine) resultHandler() {
defer func() {
if err := recover(); err != nil {
e.log.Error("Engine Panic, stopping resultHandler", "error", err, "stack", log.Stack(1))
}
}()
for result := range e.resultQueue {
e.log.Debug("Alert Rule Result", "ruleId", result.AlertJob.Rule.Id, "state", result.State, "value", result.ActualValue, "retry", result.AlertJob.RetryCount)
......@@ -119,23 +132,11 @@ func (e *Engine) resultHandler() {
result.State = alertstates.Critical
result.Description = fmt.Sprintf("Failed to run check after %d retires, Error: %v", maxAlertExecutionRetries, result.Error)
e.saveState(result)
e.responseHandler.Handle(result)
}
} else {
result.AlertJob.ResetRetry()
e.saveState(result)
e.responseHandler.Handle(result)
}
}
}
func (e *Engine) saveState(result *AlertResult) {
cmd := &m.UpdateAlertStateCommand{
AlertId: result.AlertJob.Rule.Id,
NewState: result.State,
Info: result.Description,
}
if err := bus.Dispatch(cmd); err != nil {
e.log.Error("Failed to save state", "error", err)
}
}
......@@ -23,28 +23,28 @@ func NewDashAlertExtractor(dash *m.Dashboard, orgId int64) *DashAlertExtractor {
}
}
func (e *DashAlertExtractor) lookupDatasourceId(dsName string) (int64, error) {
func (e *DashAlertExtractor) lookupDatasourceId(dsName string) (*m.DataSource, error) {
if dsName == "" {
query := &m.GetDataSourcesQuery{OrgId: e.OrgId}
if err := bus.Dispatch(query); err != nil {
return 0, err
return nil, err
} else {
for _, ds := range query.Result {
if ds.IsDefault {
return ds.Id, nil
return ds, nil
}
}
}
} else {
query := &m.GetDataSourceByNameQuery{Name: dsName, OrgId: e.OrgId}
if err := bus.Dispatch(query); err != nil {
return 0, err
return nil, err
} else {
return query.Result.Id, nil
return query.Result, nil
}
}
return 0, errors.New("Could not find datasource id for " + dsName)
return nil, errors.New("Could not find datasource id for " + dsName)
}
func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) {
......@@ -94,10 +94,11 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) {
dsName = panel.Get("datasource").MustString()
}
if datasourceId, err := e.lookupDatasourceId(dsName); err != nil {
if datasource, err := e.lookupDatasourceId(dsName); err != nil {
return nil, err
} else {
valueQuery.SetPath([]string{"datasourceId"}, datasourceId)
valueQuery.SetPath([]string{"datasourceId"}, datasource.Id)
valueQuery.SetPath([]string{"datasourceType"}, datasource.Type)
}
targetQuery := target.Get("target").MustString()
......
......@@ -52,8 +52,8 @@ func TestAlertRuleExtraction(t *testing.T) {
"to": "now"
},
"transform": {
"method": "avg",
"type": "aggregation"
"type": "avg",
"name": "aggregation"
},
"warn": {
"value": 10,
......@@ -87,7 +87,7 @@ func TestAlertRuleExtraction(t *testing.T) {
"to": "now"
},
"transform": {
"method": "avg",
"type": "avg",
"name": "aggregation"
},
"warn": {
......
......@@ -2,6 +2,7 @@ package alerting
import (
"fmt"
"time"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
......@@ -31,6 +32,7 @@ func (e *HandlerImpl) Execute(job *AlertJob, resultQueue chan *AlertResult) {
Error: err,
State: alertstates.Pending,
AlertJob: job,
ExecutionTime: time.Now(),
}
}
......@@ -102,17 +104,20 @@ func (e *HandlerImpl) evaluateRule(rule *AlertRule, series tsdb.TimeSeriesSlice)
transformedValue, _ := rule.Transformer.Transform(serie)
critResult := evalCondition(rule.Critical, transformedValue)
e.log.Debug("Alert execution Crit", "name", serie.Name, "transformedValue", transformedValue, "operator", rule.Critical.Operator, "level", rule.Critical.Value, "result", critResult)
critCondition := fmt.Sprintf("%v %s %v", transformedValue, rule.Critical.Operator, rule.Critical.Value)
e.log.Debug("Alert execution Crit", "name", serie.Name, "condition", critCondition, "result", critResult)
if critResult {
triggeredAlert = append(triggeredAlert, &TriggeredAlert{
State: alertstates.Critical,
ActualValue: transformedValue,
Name: serie.Name,
})
continue
}
warnResult := evalCondition(rule.Warning, transformedValue)
e.log.Debug("Alert execution Warn", "name", serie.Name, "transformedValue", transformedValue, "operator", rule.Warning.Operator, "level", rule.Warning.Value, "result", warnResult)
warnCondition := fmt.Sprintf("%v %s %v", transformedValue, rule.Warning.Operator, rule.Warning.Value)
e.log.Debug("Alert execution Warn", "name", serie.Name, "condition", warnCondition, "result", warnResult)
if warnResult {
triggeredAlert = append(triggeredAlert, &TriggeredAlert{
State: alertstates.Warn,
......@@ -123,7 +128,6 @@ func (e *HandlerImpl) evaluateRule(rule *AlertRule, series tsdb.TimeSeriesSlice)
}
executionState := alertstates.Ok
description := ""
for _, raised := range triggeredAlert {
if raised.State == alertstates.Critical {
executionState = alertstates.Critical
......@@ -132,9 +136,7 @@ func (e *HandlerImpl) evaluateRule(rule *AlertRule, series tsdb.TimeSeriesSlice)
if executionState != alertstates.Critical && raised.State == alertstates.Warn {
executionState = alertstates.Warn
}
description += fmt.Sprintf(descriptionFmt, raised.ActualValue, raised.Name)
}
return &AlertResult{State: executionState, Description: description, TriggeredAlerts: triggeredAlert}
return &AlertResult{State: executionState, Description: "Returned " + executionState, TriggeredAlerts: triggeredAlert, ExecutionTime: time.Now()}
}
......@@ -10,3 +10,7 @@ type Scheduler interface {
Tick(time time.Time, execQueue chan *AlertJob)
Update(rules []*AlertRule)
}
type Notifier interface {
Notify(alertResult *AlertResult)
}
package alerting
import "time"
type AlertJob struct {
Offset int64
Delay bool
......@@ -28,6 +30,7 @@ type AlertResult struct {
Description string
Error error
AlertJob *AlertJob
ExecutionTime time.Time
}
type TriggeredAlert struct {
......@@ -44,7 +47,6 @@ type Level struct {
type AlertQuery struct {
Query string
DatasourceId int64
Aggregator string
From string
To string
}
package alerting
import (
"fmt"
"strconv"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/log"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting/alertstates"
"github.com/grafana/grafana/pkg/setting"
)
type NotifierImpl struct {
log log.Logger
getNotifications func(orgId int64, notificationGroups []int64) []*Notification
}
func NewNotifier() *NotifierImpl {
log := log.New("alerting.notifier")
return &NotifierImpl{
log: log,
getNotifications: buildGetNotifiers(log),
}
}
func (n NotifierImpl) ShouldDispatch(alertResult *AlertResult, notifier *Notification) bool {
warn := alertResult.State == alertstates.Warn && notifier.SendWarning
crit := alertResult.State == alertstates.Critical && notifier.SendCritical
return (warn || crit) || alertResult.State == alertstates.Ok
}
func (n *NotifierImpl) Notify(alertResult *AlertResult) {
notifiers := n.getNotifications(alertResult.AlertJob.Rule.OrgId, alertResult.AlertJob.Rule.NotificationGroups)
for _, notifier := range notifiers {
if n.ShouldDispatch(alertResult, notifier) {
n.log.Info("Sending notification", "state", alertResult.State, "type", notifier.Type)
go notifier.Notifier.Dispatch(alertResult)
}
}
}
type Notification struct {
Name string
Type string
SendWarning bool
SendCritical bool
Notifier NotificationDispatcher
}
type EmailNotifier struct {
To string
log log.Logger
}
func (this *EmailNotifier) Dispatch(alertResult *AlertResult) {
this.log.Info("Sending email")
grafanaUrl := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort)
if setting.AppSubUrl != "" {
grafanaUrl += "/" + setting.AppSubUrl
}
query := &m.GetDashboardsQuery{
DashboardIds: []int64{alertResult.AlertJob.Rule.DashboardId},
}
if err := bus.Dispatch(query); err != nil {
this.log.Error("Failed to load dashboard", "error", err)
return
}
if len(query.Result) != 1 {
this.log.Error("Can only support one dashboard", "result", len(query.Result))
return
}
dashboard := query.Result[0]
panelId := strconv.Itoa(int(alertResult.AlertJob.Rule.PanelId))
//TODO: get from alertrule and transforms to seconds
from := "1466169458375"
to := "1466171258375"
renderUrl := fmt.Sprintf("%s/render/dashboard-solo/db/%s?from=%s&to=%s&panelId=%s&width=1000&height=500", grafanaUrl, dashboard.Slug, from, to, panelId)
cmd := &m.SendEmailCommand{
Data: map[string]interface{}{
"Name": "Name",
"State": alertResult.State,
"Description": alertResult.Description,
"TriggeredAlerts": alertResult.TriggeredAlerts,
"DashboardLink": grafanaUrl + "/dashboard/db/" + dashboard.Slug,
"AlertPageUrl": grafanaUrl + "/alerting",
"DashboardImage": renderUrl,
},
To: []string{this.To},
Template: "alert_notification.html",
}
err := bus.Dispatch(cmd)
if err != nil {
this.log.Error("Could not send alert notification as email", "error", err)
}
}
type WebhookNotifier struct {
Url string
User string
Password string
log log.Logger
}
func (this *WebhookNotifier) Dispatch(alertResult *AlertResult) {
this.log.Info("Sending webhook")
bodyJSON := simplejson.New()
bodyJSON.Set("name", alertResult.AlertJob.Rule.Name)
bodyJSON.Set("state", alertResult.State)
bodyJSON.Set("trigged", alertResult.TriggeredAlerts)
body, _ := bodyJSON.MarshalJSON()
cmd := &m.SendWebhook{
Url: this.Url,
User: this.User,
Password: this.Password,
Body: string(body),
}
bus.Dispatch(cmd)
}
type NotificationDispatcher interface {
Dispatch(alertResult *AlertResult)
}
func buildGetNotifiers(log log.Logger) func(orgId int64, notificationGroups []int64) []*Notification {
return func(orgId int64, notificationGroups []int64) []*Notification {
query := &m.GetAlertNotificationQuery{
OrgID: orgId,
Ids: notificationGroups,
IncludeAlwaysExecute: true,
}
err := bus.Dispatch(query)
if err != nil {
log.Error("Failed to read notifications", "error", err)
}
var result []*Notification
for _, notification := range query.Result {
not, err := NewNotificationFromDBModel(notification)
if err == nil {
result = append(result, not)
} else {
log.Error("Failed to read notification model", "error", err)
}
}
return result
}
}
func NewNotificationFromDBModel(model *m.AlertNotification) (*Notification, error) {
notifier, err := createNotifier(model.Type, model.Settings)
if err != nil {
return nil, err
}
return &Notification{
Name: model.Name,
Type: model.Type,
Notifier: notifier,
SendCritical: model.Settings.Get("sendCrit").MustBool(),
SendWarning: model.Settings.Get("sendWarn").MustBool(),
}, nil
}
var createNotifier = func(notificationType string, settings *simplejson.Json) (NotificationDispatcher, error) {
if notificationType == "email" {
to := settings.Get("to").MustString()
if to == "" {
return nil, fmt.Errorf("Could not find to propertie in settings")
}
return &EmailNotifier{
To: to,
log: log.New("alerting.notification.email"),
}, nil
}
url := settings.Get("url").MustString()
if url == "" {
return nil, fmt.Errorf("Could not find url propertie in settings")
}
return &WebhookNotifier{
Url: url,
User: settings.Get("user").MustString(),
Password: settings.Get("password").MustString(),
log: log.New("alerting.notification.webhook"),
}, nil
}
package alerting
import (
"testing"
"reflect"
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting/alertstates"
. "github.com/smartystreets/goconvey/convey"
)
func TestAlertNotificationExtraction(t *testing.T) {
Convey("Notifier tests", t, func() {
Convey("rules for sending notifications", func() {
dummyNotifier := NotifierImpl{}
result := &AlertResult{
State: alertstates.Critical,
}
notifier := &Notification{
Name: "Test Notifier",
Type: "TestType",
SendCritical: true,
SendWarning: true,
}
Convey("Should send notification", func() {
So(dummyNotifier.ShouldDispatch(result, notifier), ShouldBeTrue)
})
Convey("warn:false and state:warn should not send", func() {
result.State = alertstates.Warn
notifier.SendWarning = false
So(dummyNotifier.ShouldDispatch(result, notifier), ShouldBeFalse)
})
})
Convey("Parsing alert notification from settings", func() {
Convey("Parsing email", func() {
Convey("empty settings should return error", func() {
json := `{ }`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "ops",
Type: "email",
Settings: settingsJSON,
}
_, err := NewNotificationFromDBModel(model)
So(err, ShouldNotBeNil)
})
Convey("from settings", func() {
json := `
{
"to": "ops@grafana.org"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "ops",
Type: "email",
Settings: settingsJSON,
}
not, err := NewNotificationFromDBModel(model)
So(err, ShouldBeNil)
So(not.Name, ShouldEqual, "ops")
So(not.Type, ShouldEqual, "email")
So(reflect.TypeOf(not.Notifier).Elem().String(), ShouldEqual, "alerting.EmailNotifier")
email := not.Notifier.(*EmailNotifier)
So(email.To, ShouldEqual, "ops@grafana.org")
})
})
Convey("Parsing webhook", func() {
Convey("empty settings should return error", func() {
json := `{ }`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "ops",
Type: "webhook",
Settings: settingsJSON,
}
_, err := NewNotificationFromDBModel(model)
So(err, ShouldNotBeNil)
})
Convey("from settings", func() {
json := `
{
"url": "http://localhost:3000",
"username": "username",
"password": "password"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "slack",
Type: "webhook",
Settings: settingsJSON,
}
not, err := NewNotificationFromDBModel(model)
So(err, ShouldBeNil)
So(not.Name, ShouldEqual, "slack")
So(not.Type, ShouldEqual, "webhook")
So(reflect.TypeOf(not.Notifier).Elem().String(), ShouldEqual, "alerting.WebhookNotifier")
webhook := not.Notifier.(*WebhookNotifier)
So(webhook.Url, ShouldEqual, "http://localhost:3000")
})
})
})
})
}
package alerting
import (
"time"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/log"
m "github.com/grafana/grafana/pkg/models"
)
type ResultHandler interface {
Handle(result *AlertResult)
}
type ResultHandlerImpl struct {
notifier Notifier
log log.Logger
}
func NewResultHandler() *ResultHandlerImpl {
return &ResultHandlerImpl{
log: log.New("alerting.responseHandler"),
notifier: NewNotifier(),
}
}
func (handler *ResultHandlerImpl) Handle(result *AlertResult) {
if handler.shouldUpdateState(result) {
cmd := &m.UpdateAlertStateCommand{
AlertId: result.AlertJob.Rule.Id,
NewState: result.State,
Info: result.Description,
OrgId: result.AlertJob.Rule.OrgId,
TriggeredAlerts: simplejson.NewFromAny(result.TriggeredAlerts),
}
if err := bus.Dispatch(cmd); err != nil {
handler.log.Error("Failed to save state", "error", err)
}
handler.log.Debug("will notify about new state", "new state", result.State)
handler.notifier.Notify(result)
}
}
func (handler *ResultHandlerImpl) shouldUpdateState(result *AlertResult) bool {
query := &m.GetLastAlertStateQuery{
AlertId: result.AlertJob.Rule.Id,
OrgId: result.AlertJob.Rule.OrgId,
}
if err := bus.Dispatch(query); err != nil {
log.Error2("Failed to read last alert state", "error", err)
return false
}
if query.Result == nil {
return true
}
lastExecution := query.Result.Created
// Update the state when it changed, or when the last recorded state is older than 15 minutes.
cutoff := result.ExecutionTime.Add(time.Minute * -15)
olderThan15Min := lastExecution.Before(cutoff)
changedState := query.Result.NewState != result.State
return changedState || olderThan15Min
}
package alerting
import (
"testing"
"time"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting/alertstates"
. "github.com/smartystreets/goconvey/convey"
)
func TestAlertResultHandler(t *testing.T) {
Convey("Test result Handler", t, func() {
resultHandler := ResultHandlerImpl{}
mockResult := &AlertResult{
State: alertstates.Ok,
AlertJob: &AlertJob{
Rule: &AlertRule{
Id: 1,
OrgId: 1,
},
},
}
mockAlertState := &m.AlertState{}
bus.ClearBusHandlers()
bus.AddHandler("test", func(query *m.GetLastAlertStateQuery) error {
query.Result = mockAlertState
return nil
})
Convey("Should update", func() {
Convey("when no earlier alert state", func() {
mockAlertState = nil
So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
})
Convey("alert state have changed", func() {
mockAlertState = &m.AlertState{
NewState: alertstates.Critical,
}
mockResult.State = alertstates.Ok
So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
})
Convey("last alert state was 15min ago", func() {
now := time.Now()
mockAlertState = &m.AlertState{
NewState: alertstates.Critical,
Created: now.Add(time.Minute * -30),
}
mockResult.State = alertstates.Critical
mockResult.ExecutionTime = time.Now()
So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
})
})
})
}
......@@ -23,11 +23,14 @@ var tmplWelcomeOnSignUp = "welcome_on_signup.html"
func Init() error {
initMailQueue()
initWebhookQueue()
bus.AddHandler("email", sendResetPasswordEmail)
bus.AddHandler("email", validateResetPasswordCode)
bus.AddHandler("email", sendEmailCommandHandler)
bus.AddHandler("webhook", sendWebhook)
bus.AddEventListener(signUpStartedHandler)
bus.AddEventListener(signUpCompletedHandler)
......@@ -53,6 +56,17 @@ func Init() error {
return nil
}
func sendWebhook(cmd *m.SendWebhook) error {
addToWebhookQueue(&Webhook{
Url: cmd.Url,
User: cmd.User,
Password: cmd.Password,
Body: cmd.Body,
})
return nil
}
func subjectTemplateFunc(obj map[string]interface{}, value string) string {
obj["value"] = value
return ""
......
......@@ -9,6 +9,12 @@ import (
. "github.com/smartystreets/goconvey/convey"
)
type testTriggeredAlert struct {
ActualValue float64
Name string
State string
}
func TestNotifications(t *testing.T) {
Convey("Given the notifications service", t, func() {
......@@ -34,6 +40,84 @@ func TestNotifications(t *testing.T) {
So(sentMsg.Subject, ShouldEqual, "Reset your Grafana password - asd@asd.com")
So(sentMsg.Body, ShouldNotContainSubstring, "Subject")
})
Convey("Alert notifications", func() {
Convey("When sending reset email password", func() {
cmd := &m.SendEmailCommand{
Data: map[string]interface{}{
"Name": "Name",
"State": "Critical",
"Description": "Description",
"DashboardLink": "http://localhost:3000/dashboard/db/alerting",
"AlertPageUrl": "http://localhost:3000/alerting",
"DashboardImage": "http://localhost:3000/render/dashboard-solo/db/alerting?from=1466169458375&to=1466171258375&panelId=1&width=1000&height=500",
"TriggeredAlerts": []testTriggeredAlert{
{Name: "desktop", State: "Critical", ActualValue: 13},
{Name: "mobile", State: "Warn", ActualValue: 5},
},
},
To: []string{"asd@asd.com "},
Template: "alert_notification.html",
}
err := sendEmailCommandHandler(cmd)
So(err, ShouldBeNil)
So(sentMsg.Body, ShouldContainSubstring, "Alertstate: Critical")
So(sentMsg.Body, ShouldContainSubstring, "http://localhost:3000/dashboard/db/alerting")
So(sentMsg.Body, ShouldContainSubstring, "Critical")
So(sentMsg.Body, ShouldContainSubstring, "Warn")
So(sentMsg.Body, ShouldContainSubstring, "mobile")
So(sentMsg.Body, ShouldContainSubstring, "desktop")
So(sentMsg.Subject, ShouldContainSubstring, "Grafana Alert: [ Critical ] ")
})
Convey("given critical", func() {
cmd := &m.SendEmailCommand{
Data: map[string]interface{}{
"Name": "Name",
"State": "Warn",
"Description": "Description",
"DashboardLink": "http://localhost:3000/dashboard/db/alerting",
"DashboardImage": "http://localhost:3000/render/dashboard-solo/db/alerting?from=1466169458375&to=1466171258375&panelId=1&width=1000&height=500",
"AlertPageUrl": "http://localhost:3000/alerting",
"TriggeredAlerts": []testTriggeredAlert{
{Name: "desktop", State: "Critical", ActualValue: 13},
{Name: "mobile", State: "Warn", ActualValue: 5},
},
},
To: []string{"asd@asd.com "},
Template: "alert_notification.html",
}
err := sendEmailCommandHandler(cmd)
So(err, ShouldBeNil)
So(sentMsg.Body, ShouldContainSubstring, "Alertstate: Warn")
So(sentMsg.Body, ShouldContainSubstring, "http://localhost:3000/dashboard/db/alerting")
So(sentMsg.Body, ShouldContainSubstring, "Critical")
So(sentMsg.Body, ShouldContainSubstring, "Warn")
So(sentMsg.Body, ShouldContainSubstring, "mobile")
So(sentMsg.Body, ShouldContainSubstring, "desktop")
So(sentMsg.Subject, ShouldContainSubstring, "Grafana Alert: [ Warn ]")
})
Convey("given ok", func() {
cmd := &m.SendEmailCommand{
Data: map[string]interface{}{
"Name": "Name",
"State": "Ok",
"Description": "Description",
"DashboardLink": "http://localhost:3000/dashboard/db/alerting",
"AlertPageUrl": "http://localhost:3000/alerting",
},
To: []string{"asd@asd.com "},
Template: "alert_notification.html",
}
err := sendEmailCommandHandler(cmd)
So(err, ShouldBeNil)
So(sentMsg.Subject, ShouldContainSubstring, "Grafana Alert: [ Ok ]")
})
})
})
}
package notifications
import (
"io/ioutil"
"testing"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
. "github.com/smartystreets/goconvey/convey"
)
func TestEmailIntegrationTest(t *testing.T) {
SkipConvey("Given the notifications service", t, func() {
bus.ClearBusHandlers()
setting.StaticRootPath = "../../../public/"
setting.Smtp.Enabled = true
setting.Smtp.TemplatesPattern = "emails/*.html"
setting.Smtp.FromAddress = "from@address.com"
err := Init()
So(err, ShouldBeNil)
addToMailQueue = func(msg *Message) {
ioutil.WriteFile("../../../tmp/test_email.html", []byte(msg.Body), 0777)
}
Convey("When sending reset email password", func() {
cmd := &m.SendEmailCommand{
Data: map[string]interface{}{
"Name": "Name",
"State": "Critical",
"Description": "Description",
"DashboardLink": "http://localhost:3000/dashboard/db/alerting",
"AlertPageUrl": "http://localhost:3000/alerting",
"DashboardImage": "http://localhost:3000/render/dashboard-solo/db/alerting?from=1466169458375&to=1466171258375&panelId=3&width=1000&height=500",
"TriggeredAlerts": []testTriggeredAlert{
{Name: "desktop", State: "Critical", ActualValue: 13},
{Name: "mobile", State: "Warn", ActualValue: 5},
},
},
To: []string{"asd@asd.com "},
Template: "alert_notification.html",
}
err := sendEmailCommandHandler(cmd)
So(err, ShouldBeNil)
})
})
}
package notifications
import (
"bytes"
"net/http"
"time"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/util"
)
type Webhook struct {
Url string
User string
Password string
Body string
}
var webhookQueue chan *Webhook
var webhookLog log.Logger
func initWebhookQueue() {
webhookLog = log.New("notifications.webhook")
webhookQueue = make(chan *Webhook, 10)
go processWebhookQueue()
}
func processWebhookQueue() {
for {
select {
case webhook := <-webhookQueue:
err := sendWebRequest(webhook)
if err != nil {
webhookLog.Error("Failed to send webrequest ", "error", err)
}
}
}
}
func sendWebRequest(webhook *Webhook) error {
client := http.Client{
Timeout: 3 * time.Second,
}
request, err := http.NewRequest("POST", webhook.Url, bytes.NewReader([]byte(webhook.Body)))
if webhook.User != "" && webhook.Password != "" {
request.Header.Add("Authorization", util.GetBasicAuthHeader(webhook.User, webhook.Password))
}
if err != nil {
return err
}
resp, err := client.Do(request)
if err != nil {
return err
}
defer resp.Body.Close()
return nil
}
var addToWebhookQueue = func(msg *Webhook) {
webhookQueue <- msg
}
package sqlstore
import (
"bytes"
"fmt"
"strconv"
"time"
"github.com/go-xorm/xorm"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
)
func init() {
bus.AddHandler("sql", AlertNotificationQuery)
bus.AddHandler("sql", CreateAlertNotificationCommand)
bus.AddHandler("sql", UpdateAlertNotification)
bus.AddHandler("sql", DeleteAlertNotification)
}
func DeleteAlertNotification(cmd *m.DeleteAlertNotificationCommand) error {
return inTransaction(func(sess *xorm.Session) error {
sql := "DELETE FROM alert_notification WHERE alert_notification.org_id = ? AND alert_notification.id = ?"
_, err := sess.Exec(sql, cmd.OrgId, cmd.Id)
if err != nil {
return err
}
return nil
})
}
func AlertNotificationQuery(query *m.GetAlertNotificationQuery) error {
sess := x.NewSession()
defer sess.Close()
return getAlertNotifications(query, sess)
}
func getAlertNotifications(query *m.GetAlertNotificationQuery, sess *xorm.Session) error {
var sql bytes.Buffer
params := make([]interface{}, 0)
sql.WriteString(`SELECT
alert_notification.id,
alert_notification.org_id,
alert_notification.name,
alert_notification.type,
alert_notification.created,
alert_notification.updated,
alert_notification.settings,
alert_notification.always_execute
FROM alert_notification
`)
sql.WriteString(` WHERE alert_notification.org_id = ?`)
params = append(params, query.OrgID)
if query.Name != "" {
sql.WriteString(` AND alert_notification.name = ?`)
params = append(params, query.Name)
}
if query.Id != 0 {
sql.WriteString(` AND alert_notification.id = ?`)
params = append(params, strconv.Itoa(int(query.Id)))
}
if len(query.Ids) > 0 {
sql.WriteString(` AND (`)
for i, id := range query.Ids {
if i != 0 {
sql.WriteString(` OR`)
}
sql.WriteString(` alert_notification.id = ?`)
params = append(params, id)
}
sql.WriteString(`)`)
}
var searches []*m.AlertNotification
if err := sess.Sql(sql.String(), params...).Find(&searches); err != nil {
return err
}
var result []*m.AlertNotification
var def []*m.AlertNotification
if query.IncludeAlwaysExecute {
if err := sess.Where("org_id = ? AND always_execute = 1", query.OrgID).Find(&def); err != nil {
return err
}
result = append(result, def...)
}
for _, s := range searches {
canAppend := true
for _, d := range result {
if d.Id == s.Id {
canAppend = false
break
}
}
if canAppend {
result = append(result, s)
}
}
query.Result = result
return nil
}
func CreateAlertNotificationCommand(cmd *m.CreateAlertNotificationCommand) error {
return inTransaction(func(sess *xorm.Session) error {
existingQuery := &m.GetAlertNotificationQuery{OrgID: cmd.OrgID, Name: cmd.Name, IncludeAlwaysExecute: false}
err := getAlertNotifications(existingQuery, sess)
if err != nil {
return err
}
if len(existingQuery.Result) > 0 {
return fmt.Errorf("Alert notification name %s already exists", cmd.Name)
}
alertNotification := &m.AlertNotification{
OrgId: cmd.OrgID,
Name: cmd.Name,
Type: cmd.Type,
Created: time.Now(),
Settings: cmd.Settings,
Updated: time.Now(),
AlwaysExecute: cmd.AlwaysExecute,
}
_, err = sess.Insert(alertNotification)
if err != nil {
return err
}
cmd.Result = alertNotification
return nil
})
}
func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error {
return inTransaction(func(sess *xorm.Session) (err error) {
current := &m.AlertNotification{}
_, err = sess.Id(cmd.Id).Get(current)
if err != nil {
return err
}
alertNotification := &m.AlertNotification{
Id: cmd.Id,
OrgId: cmd.OrgID,
Name: cmd.Name,
Type: cmd.Type,
Settings: cmd.Settings,
Updated: time.Now(),
Created: current.Created,
AlwaysExecute: cmd.AlwaysExecute,
}
sess.UseBool("always_execute")
var affected int64
affected, err = sess.Id(alertNotification.Id).Update(alertNotification)
if err != nil {
return err
}
if affected == 0 {
return fmt.Errorf("Could not find alert notification")
}
cmd.Result = alertNotification
return nil
})
}
package sqlstore
import (
"fmt"
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
)
func TestAlertNotificationSQLAccess(t *testing.T) {
Convey("Testing Alert notification sql access", t, func() {
InitTestDB(t)
var err error
Convey("Alert notifications should be empty", func() {
cmd := &m.GetAlertNotificationQuery{
OrgID: FakeOrgId,
Name: "email",
}
err := AlertNotificationQuery(cmd)
fmt.Printf("errror %v", err)
So(err, ShouldBeNil)
So(len(cmd.Result), ShouldEqual, 0)
})
Convey("Can save Alert Notification", func() {
cmd := &m.CreateAlertNotificationCommand{
Name: "ops",
Type: "email",
OrgID: 1,
Settings: simplejson.New(),
AlwaysExecute: true,
}
err = CreateAlertNotificationCommand(cmd)
So(err, ShouldBeNil)
So(cmd.Result.Id, ShouldNotEqual, 0)
So(cmd.Result.OrgId, ShouldNotEqual, 0)
So(cmd.Result.Type, ShouldEqual, "email")
So(cmd.Result.AlwaysExecute, ShouldEqual, true)
Convey("Cannot save Alert Notification with the same name", func() {
err = CreateAlertNotificationCommand(cmd)
So(err, ShouldNotBeNil)
})
Convey("Can update alert notification", func() {
newCmd := &m.UpdateAlertNotificationCommand{
Name: "NewName",
Type: "webhook",
OrgID: cmd.Result.OrgId,
Settings: simplejson.New(),
Id: cmd.Result.Id,
AlwaysExecute: true,
}
err := UpdateAlertNotification(newCmd)
So(err, ShouldBeNil)
So(newCmd.Result.Name, ShouldEqual, "NewName")
})
})
Convey("Can search using an array of ids", func() {
So(CreateAlertNotificationCommand(&m.CreateAlertNotificationCommand{
Name: "nagios",
Type: "webhook",
OrgID: 1,
Settings: simplejson.New(),
AlwaysExecute: true,
}), ShouldBeNil)
So(CreateAlertNotificationCommand(&m.CreateAlertNotificationCommand{
Name: "ops2",
Type: "email",
OrgID: 1,
Settings: simplejson.New(),
}), ShouldBeNil)
So(CreateAlertNotificationCommand(&m.CreateAlertNotificationCommand{
Name: "slack",
Type: "webhook",
OrgID: 1,
Settings: simplejson.New(),
}), ShouldBeNil)
Convey("search", func() {
existingNotification := int64(2)
missingThatShouldNotCauseErrors := int64(99)
query := &m.GetAlertNotificationQuery{
Ids: []int64{existingNotification, missingThatShouldNotCauseErrors},
OrgID: 1,
IncludeAlwaysExecute: true,
}
err := AlertNotificationQuery(query)
So(err, ShouldBeNil)
So(len(query.Result), ShouldEqual, 2)
defaultNotifications := 0
for _, not := range query.Result {
if not.AlwaysExecute {
defaultNotifications++
}
}
So(defaultNotifications, ShouldEqual, 1)
})
})
})
}
......@@ -12,6 +12,23 @@ import (
func init() {
bus.AddHandler("sql", SetNewAlertState)
bus.AddHandler("sql", GetAlertStateLogByAlertId)
bus.AddHandler("sql", GetLastAlertStateQuery)
}
func GetLastAlertStateQuery(cmd *m.GetLastAlertStateQuery) error {
states := make([]m.AlertState, 0)
if err := x.Where("alert_id = ? and org_id = ? ", cmd.AlertId, cmd.OrgId).Desc("created").Find(&states); err != nil {
return err
}
if len(states) == 0 {
cmd.Result = nil
return nil
}
cmd.Result = &states[0]
return nil
}
func SetNewAlertState(cmd *m.UpdateAlertStateCommand) error {
......@@ -30,20 +47,16 @@ func SetNewAlertState(cmd *m.UpdateAlertStateCommand) error {
return fmt.Errorf("Could not find alert")
}
if alert.State == cmd.NewState {
cmd.Result = &m.Alert{}
return nil
}
alert.State = cmd.NewState
sess.Id(alert.Id).Update(&alert)
alertState := m.AlertState{
AlertId: cmd.AlertId,
OrgId: cmd.AlertId,
OrgId: cmd.OrgId,
NewState: cmd.NewState,
Info: cmd.Info,
Created: time.Now(),
TriggeredAlerts: cmd.TriggeredAlerts,
}
sess.Insert(&alertState)
......@@ -54,12 +67,12 @@ func SetNewAlertState(cmd *m.UpdateAlertStateCommand) error {
}
func GetAlertStateLogByAlertId(cmd *m.GetAlertsStateQuery) error {
alertLogs := make([]m.AlertState, 0)
states := make([]m.AlertState, 0)
if err := x.Where("alert_id = ?", cmd.AlertId).Desc("created").Find(&alertLogs); err != nil {
if err := x.Where("alert_id = ?", cmd.AlertId).Desc("created").Find(&states); err != nil {
return err
}
cmd.Result = &alertLogs
cmd.Result = &states
return nil
}
......@@ -249,10 +249,10 @@ func GetDashboards(query *m.GetDashboardsQuery) error {
return m.ErrCommandValidationFailed
}
var dashboards = make([]m.Dashboard, 0)
var dashboards = make([]*m.Dashboard, 0)
err := x.In("id", query.DashboardIds).Find(&dashboards)
query.Result = &dashboards
query.Result = dashboards
if err != nil {
return err
......
......@@ -49,6 +49,7 @@ func addAlertMigrations(mg *Migrator) {
{Name: "org_id", Type: DB_BigInt, Nullable: false},
{Name: "new_state", Type: DB_NVarchar, Length: 50, Nullable: false},
{Name: "info", Type: DB_Text, Nullable: true},
{Name: "triggered_alerts", Type: DB_Text, Nullable: true},
{Name: "created", Type: DB_DateTime, Nullable: false},
},
}
......@@ -66,4 +67,20 @@ func addAlertMigrations(mg *Migrator) {
}
mg.AddMigration("create alert_heartbeat table v1", NewAddTableMigration(alert_heartbeat))
alert_notification := Table{
Name: "alert_notification",
Columns: []*Column{
{Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
{Name: "org_id", Type: DB_BigInt, Nullable: false},
{Name: "name", Type: DB_NVarchar, Length: 255, Nullable: false},
{Name: "type", Type: DB_NVarchar, Length: 255, Nullable: false},
{Name: "always_execute", Type: DB_Bool, Nullable: false},
{Name: "settings", Type: DB_Text, Nullable: false},
{Name: "created", Type: DB_DateTime, Nullable: false},
{Name: "updated", Type: DB_DateTime, Nullable: false},
},
}
mg.AddMigration("create alert_notification table v1", NewAddTableMigration(alert_notification))
}
......@@ -199,6 +199,24 @@ function setupAngularRoutes($routeProvider, $locationProvider) {
controllerAs: 'ctrl',
resolve: loadAlertingBundle,
})
.when('/alerting/notifications', {
templateUrl: 'public/app/features/alerting/partials/notifications_list.html',
controller: 'AlertNotificationsListCtrl',
controllerAs: 'ctrl',
resolve: loadAlertingBundle,
})
.when('/alerting/notification/new', {
templateUrl: 'public/app/features/alerting/partials/notification_edit.html',
controller: 'AlertNotificationEditCtrl',
controllerAs: 'ctrl',
resolve: loadAlertingBundle,
})
.when('/alerting/notification/:notificationId/edit', {
templateUrl: 'public/app/features/alerting/partials/notification_edit.html',
controller: 'AlertNotificationEditCtrl',
controllerAs: 'ctrl',
resolve: loadAlertingBundle,
})
.when('/alerting/:alertId/states', {
templateUrl: 'public/app/features/alerting/partials/alert_log.html',
controller: 'AlertLogCtrl',
......
import './alerts_ctrl';
import './alert_log_ctrl';
import './notifications_list_ctrl';
import './notification_edit_ctrl';
///<reference path="../../headers/common.d.ts" />
import angular from 'angular';
import _ from 'lodash';
import coreModule from '../../core/core_module';
import config from 'app/core/config';
export class AlertNotificationEditCtrl {
notification: any;
/** @ngInject */
constructor(private $routeParams, private backendSrv, private $scope) {
if ($routeParams.notificationId) {
this.loadNotification($routeParams.notificationId);
} else {
this.notification = {
settings: {
sendCrit: true,
sendWarn: true,
}
};
}
}
loadNotification(notificationId) {
this.backendSrv.get(`/api/alerts/notification/${notificationId}`).then(result => {
this.notification = result;
});
}
isNew() {
return this.notification === undefined || this.notification.id === undefined;
}
save() {
if (this.notification.id) {
this.backendSrv.put(`/api/alerts/notification/${this.notification.id}`, this.notification)
.then(result => {
this.notification = result;
this.$scope.appEvent('alert-success', ['Notification updated!', '']);
}, () => {
this.$scope.appEvent('alert-error', ['Unable to update notification.', '']);
});
} else {
this.backendSrv.post(`/api/alerts/notification`, this.notification)
.then(result => {
this.notification = result;
this.$scope.appEvent('alert-success', ['Notification created!', '']);
}, () => {
this.$scope.appEvent('alert-error', ['Unable to create notification.', '']);
});
}
}
}
coreModule.controller('AlertNotificationEditCtrl', AlertNotificationEditCtrl);
///<reference path="../../headers/common.d.ts" />
import angular from 'angular';
import _ from 'lodash';
import coreModule from '../../core/core_module';
import config from 'app/core/config';
export class AlertNotificationsListCtrl {
notifications: any;
/** @ngInject */
constructor(private backendSrv, private $scope) {
this.loadNotifications();
}
loadNotifications() {
this.backendSrv.get(`/api/alerts/notifications`).then(result => {
this.notifications = result;
});
}
deleteNotification(notificationId) {
this.backendSrv.delete(`/api/alerts/notification/${notificationId}`)
.then(() => {
this.notifications = this.notifications.filter(notification => {
return notification.id !== notificationId;
});
this.$scope.appEvent('alert-success', ['Notification deleted', '']);
}, () => {
this.$scope.appEvent('alert-error', ['Unable to delete notification', '']);
});
}
}
coreModule.controller('AlertNotificationsListCtrl', AlertNotificationsListCtrl);
<navbar icon="fa fa-fw fa-list" title="Alerting" title-url="alerting">
</navbar>
<div class="page-container" >
<div class="page-header">
<h1>Alert notification</h1>
</div>
<div class="gf-form-group section">
<div class="gf-form">
<span class="gf-form-label width-8">Name</span>
<input type="text" class="gf-form-input max-width-12" ng-model="ctrl.notification.name"></input>
</div>
<div class="gf-form">
<span class="gf-form-label width-8">Type</span>
<div class="gf-form-select-wrapper width-12">
<select class="gf-form-input"
ng-model="ctrl.notification.type"
ng-options="t for t in ['webhook', 'email']">
</select>
</div>
</div>
<div class="gf-form">
<gf-form-switch class="gf-form" label-class="width-8" label="Always execute" checked="ctrl.notification.alwaysExecute" on-change=""></gf-form-switch>
</div>
<div class="gf-form">
<gf-form-switch class="gf-form" label-class="width-8" label="Send Warning" checked="ctrl.notification.settings.sendWarn" on-change=""></gf-form-switch>
</div>
<div class="gf-form">
<gf-form-switch class="gf-form" label-class="width-8" label="Send Critical" checked="ctrl.notification.settings.sendCrit" on-change=""></gf-form-switch>
</div>
</div>
<div class="gf-form-group section" ng-show="ctrl.notification.type === 'webhook'">
<div class="gf-form">
<span class="gf-form-label width-6">Url</span>
<input type="text" class="gf-form-input max-width-26" ng-model="ctrl.notification.settings.url"></input>
</div>
<div class="gf-form-inline">
<div class="gf-form">
<span class="gf-form-label width-6">Username</span>
<input type="text" class="gf-form-input max-width-10" ng-model="ctrl.notification.settings.username"></input>
</div>
<div class="gf-form">
<span class="gf-form-label width-6">Password</span>
<input type="text" class="gf-form-input max-width-10" ng-model="ctrl.notification.settings.password"></input>
</div>
</div>
</div>
<div class="gf-form-group section" ng-show="ctrl.notification.type === 'email'">
<div class="gf-form">
<span class="gf-form-label width-8">To</span>
<input type="text" class="gf-form-input max-width-26" ng-model="ctrl.notification.settings.to">
</div>
</div>
<div class="gf-form-button-group">
<button ng-click="ctrl.save()" class="btn btn-success">Save</button>
</div>
</div>
<navbar icon="fa fa-fw fa-list" title="Alerting" title-url="alerting">
</navbar>
<div class="page-container" >
<div class="page-header">
<h1>Alert notifications</h1>
<a href="alerting/notification/new" class="btn btn-success pull-right">
<i class="fa fa-plus"></i>
New Notification
</a>
</div>
<table class="grafana-options-table" style="/*width: 600px;*/">
<thead>
<th style="min-width: 200px"><strong>Name</strong></th>
<th style="min-width: 100px">Type</th>
<th style="width: 1%"></th>
</thead>
<tr ng-repeat="notification in ctrl.notifications">
<td>
<a href="alerting/notification/{{notification.id}}/edit">
{{notification.name}}
</a>
</td>
<td>
{{notification.type}}
</td>
<td>
<a href="alerting/notification/{{notification.id}}/edit" class="btn btn-inverse btn-small">
<i class="fa fa-edit"></i>
edit
</a>
<a ng-click="ctrl.deleteNotification(notification.id)" class="btn btn-danger btn-small">
<i class="fa fa-remove"></i>
</a>
</td>
</tr>
</table>
</div>
......@@ -271,7 +271,6 @@ function (angular, $, moment, _, kbn, GraphTooltip, thresholds) {
function callPlot(incrementRenderCounter) {
try {
console.log('rendering');
$.plot(elem, sortedSeries, options);
} catch (e) {
console.log('flotcharts error', e);
......
......@@ -109,8 +109,11 @@
<div class="gf-form-inline">
<div class="gf-form">
<span class="gf-form-label">Groups</span>
<input class="gf-form-input max-width-7" type="text" ng-model="ctrl.alert.notify"></input>
<!--
<bootstrap-tagsinput ng-model="ctrl.alert.notify" tagclass="label label-tag" placeholder="add tags">
</bootstrap-tagsinput>
-->
</div>
</div>
</div>
......