package main

import (
"bytes"
"encoding/json"
"net/http"
"net/http/httptest"
"reflect"
"strings"
"testing"
"time"

"github.com/Masterminds/semver/v3"
"github.com/google/go-cmp/cmp"
promlabels "github.com/prometheus/prometheus/model/labels"

"github.com/prymitive/karma/internal/config"
"github.com/prymitive/karma/internal/mock"
"github.com/prymitive/karma/internal/models"
)
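// groupTest describes a single expected alert group: the receiver and
// labels it should be grouped by, its expected ID, per-state alert counts
// and the alerts it should contain.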
type groupTest struct {
stateCount map[string]int
receiver string
id string
labels promlabels.Labels
alerts []models.Alert
}
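// groupTests lists every alert group expected in the /alerts.json response
// when alerts are loaded from the bundled Alertmanager mock files.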
var groupTests = []groupTest{
{
receiver: "by-name",
labels: promlabels.FromStrings("alertname", "Memory_Usage_Too_High"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 10, 0, 0, 0, 0, time.UTC),
Annotations: models.Annotations{
models.Annotation{
Visible: true,
Name: "alert",
Value: "Memory usage exceeding threshold",
},
models.Annotation{
Visible: true,
Name: "dashboard",
Value: "http://localhost/dashboard.html",
IsLink: true,
},
},
Labels: promlabels.FromStrings("cluster", "prod", "instance", "server2", "job", "node_exporter"),
State: models.AlertStateActive,
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Receiver: "by-name",
},
},
id: "d9415cf0750fe30",
stateCount: map[string]int{
models.AlertStateActive.String(): 1,
models.AlertStateSuppressed.String(): 0,
models.AlertStateUnprocessed.String(): 0,
},
},
{
receiver: "by-cluster-service",
labels: promlabels.FromStrings("alertname", "Memory_Usage_Too_High", "cluster", "prod"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 10, 0, 0, 0, 1, time.UTC),
Annotations: models.Annotations{
models.Annotation{
Visible: true,
Name: "alert",
Value: "Memory usage exceeding threshold",
},
models.Annotation{
Visible: true,
Name: "dashboard",
Value: "http://localhost/dashboard.html",
IsLink: true,
},
},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("instance", "server2", "job", "node_exporter"),
State: models.AlertStateActive,
Receiver: "by-cluster-service",
},
},
id: "c3405fd66145d86e",
stateCount: map[string]int{
models.AlertStateActive.String(): 1,
models.AlertStateSuppressed.String(): 0,
models.AlertStateUnprocessed.String(): 0,
},
},
{
receiver: "by-cluster-service",
labels: promlabels.FromStrings("alertname", "Host_Down", "cluster", "staging"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 10, 0, 0, 0, 0, time.UTC),
Annotations: models.Annotations{},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("instance", "server3", "ip", "127.0.0.3"),
State: models.AlertStateActive,
Receiver: "by-cluster-service",
},
{
Annotations: models.Annotations{},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("instance", "server4", "ip", "127.0.0.4"),
State: models.AlertStateActive,
Receiver: "by-cluster-service",
},
{
Annotations: models.Annotations{},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("instance", "server5", "ip", "127.0.0.5"),
State: models.AlertStateActive,
Receiver: "by-cluster-service",
},
},
id: "13043134f43c9070",
stateCount: map[string]int{
models.AlertStateActive.String(): 3,
models.AlertStateSuppressed.String(): 0,
models.AlertStateUnprocessed.String(): 0,
},
},
{
receiver: "by-cluster-service",
labels: promlabels.FromStrings("alertname", "Host_Down", "cluster", "dev"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 10, 1, 0, 0, 0, time.UTC),
Annotations: models.Annotations{},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateSuppressed,
Source: "http://localhost/prometheus",
SilencedBy: []string{"168f139d-77e4-41d6-afb5-8fe2cfd0cc9d"},
},
},
Labels: promlabels.FromStrings("instance", "server6", "ip", "127.0.0.6"),
State: models.AlertStateSuppressed,
Receiver: "by-cluster-service",
},
{
StartsAt: time.Date(2019, time.January, 10, 0, 59, 0, 0, time.UTC),
Annotations: models.Annotations{},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateSuppressed,
Source: "http://localhost/prometheus",
SilencedBy: []string{"168f139d-77e4-41d6-afb5-8fe2cfd0cc9d", "378eaa69-097d-41c4-a8c2-fe6568c3abfc"},
},
},
Labels: promlabels.FromStrings("instance", "server7", "ip", "127.0.0.7"),
State: models.AlertStateSuppressed,
Receiver: "by-cluster-service",
},
{
StartsAt: time.Date(2019, time.January, 12, 0, 0, 0, 0, time.UTC),
Annotations: models.Annotations{},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateSuppressed,
Source: "http://localhost/prometheus",
SilencedBy: []string{"168f139d-77e4-41d6-afb5-8fe2cfd0cc9d"},
},
},
Labels: promlabels.FromStrings("instance", "server8", "ip", "127.0.0.8"),
State: models.AlertStateSuppressed,
Receiver: "by-cluster-service",
},
},
id: "1704fa283de6bc33",
stateCount: map[string]int{
models.AlertStateActive.String(): 0,
models.AlertStateSuppressed.String(): 3,
models.AlertStateUnprocessed.String(): 0,
},
},
{
receiver: "by-name",
labels: promlabels.FromStrings("alertname", "Host_Down"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 1, 0, 0, 0, 0, time.UTC),
Annotations: models.Annotations{
models.Annotation{
Visible: true,
Name: "url",
Value: "http://localhost/example.html",
IsLink: true,
},
},
Labels: promlabels.FromStrings("cluster", "prod", "instance", "server1", "ip", "127.0.0.1"),
State: models.AlertStateActive,
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Receiver: "by-name",
},
{
StartsAt: time.Date(2019, time.January, 1, 0, 1, 0, 0, time.UTC),
Annotations: models.Annotations{},
Labels: promlabels.FromStrings("cluster", "prod", "instance", "server2", "ip", "127.0.0.2"),
State: models.AlertStateActive,
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Receiver: "by-name",
},
{
StartsAt: time.Date(2019, time.January, 1, 0, 1, 0, 1, time.UTC),
Annotations: models.Annotations{},
Labels: promlabels.FromStrings("cluster", "staging", "instance", "server3", "ip", "127.0.0.3"),
State: models.AlertStateActive,
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Receiver: "by-name",
},
{
StartsAt: time.Date(2019, time.January, 1, 0, 0, 59, 0, time.UTC),
Annotations: models.Annotations{},
Labels: promlabels.FromStrings("cluster", "staging", "instance", "server4", "ip", "127.0.0.4"),
State: models.AlertStateActive,
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Receiver: "by-name",
},
{
StartsAt: time.Date(2019, time.January, 10, 0, 0, 0, 0, time.UTC),
Annotations: models.Annotations{},
Labels: promlabels.FromStrings("cluster", "staging", "instance", "server5", "ip", "127.0.0.5"),
State: models.AlertStateActive,
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Receiver: "by-name",
},
{
StartsAt: time.Date(2019, time.January, 10, 1, 0, 0, 0, time.UTC),
Annotations: models.Annotations{},
Labels: promlabels.FromStrings("cluster", "dev", "instance", "server6", "ip", "127.0.0.6"),
State: models.AlertStateSuppressed,
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateSuppressed,
Source: "http://localhost/prometheus",
SilencedBy: []string{"168f139d-77e4-41d6-afb5-8fe2cfd0cc9d"},
},
},
Receiver: "by-name",
},
{
StartsAt: time.Date(2019, time.January, 10, 0, 20, 0, 0, time.UTC),
Annotations: models.Annotations{},
Labels: promlabels.FromStrings("cluster", "dev", "instance", "server7", "ip", "127.0.0.7"),
State: models.AlertStateSuppressed,
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateSuppressed,
Source: "http://localhost/prometheus",
SilencedBy: []string{"168f139d-77e4-41d6-afb5-8fe2cfd0cc9d", "378eaa69-097d-41c4-a8c2-fe6568c3abfc"},
},
},
Receiver: "by-name",
},
{
StartsAt: time.Date(2019, time.January, 10, 0, 21, 0, 0, time.UTC),
Annotations: models.Annotations{},
Labels: promlabels.FromStrings("cluster", "dev", "instance", "server8", "ip", "127.0.0.8"),
State: models.AlertStateSuppressed,
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateSuppressed,
Source: "http://localhost/prometheus",
SilencedBy: []string{"168f139d-77e4-41d6-afb5-8fe2cfd0cc9d"},
},
},
Receiver: "by-name",
},
},
id: "c48b045a836d41d6",
stateCount: map[string]int{
models.AlertStateActive.String(): 5,
models.AlertStateSuppressed.String(): 3,
models.AlertStateUnprocessed.String(): 0,
},
},
{
receiver: "by-cluster-service",
labels: promlabels.FromStrings("alertname", "Free_Disk_Space_Too_Low", "cluster", "staging"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 10, 0, 19, 0, 0, time.UTC),
Annotations: models.Annotations{
models.Annotation{
Visible: true,
Name: "alert",
Value: "Less than 10% disk space is free",
},
models.Annotation{
Visible: true,
Name: "dashboard",
Value: "http://localhost/dashboard.html",
IsLink: true,
},
},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("disk", "sda", "instance", "server5", "job", "node_exporter"),
State: models.AlertStateActive,
Receiver: "by-cluster-service",
},
},
id: "6dbbdb0f1e75b835",
stateCount: map[string]int{
models.AlertStateActive.String(): 1,
models.AlertStateSuppressed.String(): 0,
models.AlertStateUnprocessed.String(): 0,
},
},
{
receiver: "by-cluster-service",
labels: promlabels.FromStrings("alertname", "Host_Down", "cluster", "prod"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 12, 0, 19, 0, 0, time.UTC),
Annotations: models.Annotations{
models.Annotation{
Visible: true,
Name: "url",
Value: "http://localhost/example.html",
IsLink: true,
},
},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("instance", "server1", "ip", "127.0.0.1"),
State: models.AlertStateActive,
Receiver: "by-cluster-service",
},
{
Annotations: models.Annotations{},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("instance", "server2", "ip", "127.0.0.2"),
State: models.AlertStateActive,
Receiver: "by-cluster-service",
},
},
id: "7cc564cafcfd4d9",
stateCount: map[string]int{
models.AlertStateActive.String(): 2,
models.AlertStateSuppressed.String(): 0,
models.AlertStateUnprocessed.String(): 0,
},
},
{
receiver: "by-name",
labels: promlabels.FromStrings("alertname", "HTTP_Probe_Failed"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 14, 0, 0, 0, 0, time.UTC),
Annotations: models.Annotations{
models.Annotation{
Visible: true,
Name: "help",
Value: "Example help annotation",
},
models.Annotation{
Visible: true,
Name: "url",
Value: "http://localhost/example.html",
IsLink: true,
},
},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateSuppressed,
Source: "http://localhost/prometheus",
},
},
Labels: promlabels.FromStrings("instance", "web1"),
State: models.AlertStateSuppressed,
Receiver: "by-name",
},
{
StartsAt: time.Date(2019, time.January, 14, 0, 0, 0, 0, time.UTC),
Annotations: models.Annotations{},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("instance", "web2"),
State: models.AlertStateActive,
Receiver: "by-name",
},
},
id: "ced38b93167636b",
stateCount: map[string]int{
models.AlertStateActive.String(): 1,
models.AlertStateSuppressed.String(): 1,
models.AlertStateUnprocessed.String(): 0,
},
},
{
receiver: "by-name",
labels: promlabels.FromStrings("alertname", "Free_Disk_Space_Too_Low"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 15, 0, 0, 0, 0, time.UTC),
Annotations: models.Annotations{
models.Annotation{
Visible: true,
Name: "alert",
Value: "Less than 10% disk space is free",
},
models.Annotation{
Visible: true,
Name: "dashboard",
Value: "http://localhost/dashboard.html",
IsLink: true,
},
},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("cluster", "staging", "disk", "sda", "instance", "server5", "job", "node_exporter"),
State: models.AlertStateActive,
Receiver: "by-name",
},
},
id: "aa4f5d45db158b4e",
stateCount: map[string]int{
models.AlertStateActive.String(): 1,
models.AlertStateSuppressed.String(): 0,
models.AlertStateUnprocessed.String(): 0,
},
},
{
receiver: "by-cluster-service",
labels: promlabels.FromStrings("alertname", "HTTP_Probe_Failed", "cluster", "dev"),
alerts: []models.Alert{
{
StartsAt: time.Date(2019, time.January, 10, 20, 0, 0, 0, time.UTC),
Annotations: models.Annotations{
models.Annotation{
Visible: true,
Name: "help",
Value: "Example help annotation",
},
models.Annotation{
Visible: true,
Name: "url",
Value: "http://localhost/example.html",
IsLink: true,
},
},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateSuppressed,
Source: "http://localhost/prometheus",
SilencedBy: []string{"0804764c-6163-4c64-b0a9-08feebe2db4b"},
},
},
Labels: promlabels.FromStrings("instance", "web1"),
State: models.AlertStateSuppressed,
Receiver: "by-cluster-service",
},
{
StartsAt: time.Date(2019, time.January, 10, 19, 0, 0, 0, time.UTC),
Annotations: models.Annotations{},
Alertmanager: []models.AlertmanagerInstance{
{
Name: "default",
State: models.AlertStateActive,
Source: "http://localhost/prometheus",
SilencedBy: []string{},
},
},
Labels: promlabels.FromStrings("instance", "web2"),
State: models.AlertStateActive,
Receiver: "by-cluster-service",
},
},
id: "e764048aca571a6c",
stateCount: map[string]int{
models.AlertStateActive.String(): 1,
models.AlertStateSuppressed.String(): 1,
models.AlertStateUnprocessed.String(): 0,
},
},
}
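// filtersExpected is the filter list expected in the response when the
// request doesn't set any filters.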
var filtersExpected = []models.Filter{}
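// compareAlertGroups reports whether a group returned by the API matches a
// test case, comparing only the receiver and the group labels.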
func compareAlertGroups(testCase groupTest, group models.APIAlertGroup) bool {
if testCase.receiver != group.Receiver {
return false
}
if testCase.labels.Len() != len(group.Labels) {
return false
}
match := true
testCase.labels.Range(func(l promlabels.Label) {
if group.Labels.Get(l.Name) != l.Value {
match = false
}
})
return match
}
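// compareAlerts reports whether an alert returned by the API matches the
// expected alert, comparing only the receiver and the alert labels.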
func compareAlerts(expectedAlert models.Alert, gotAlert models.APIAlert) bool {
if gotAlert.Receiver != expectedAlert.Receiver {
return false
}
if expectedAlert.Labels.Len() != len(gotAlert.Labels) {
return false
}
match := true
expectedAlert.Labels.Range(func(l promlabels.Label) {
if gotAlert.Labels.Get(l.Name) != l.Value {
match = false
}
})
return match
}
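// testAlert verifies a single alert returned by the API against the expected
// alert: receiver, state, annotations, labels and, for every Alertmanager
// instance, its state, source and (on Alertmanager >=0.6.1) silences.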
func testAlert(version string, t *testing.T, expectedAlert models.Alert, gotAlert models.APIAlert) {
if gotAlert.Receiver != expectedAlert.Receiver {
t.Errorf("[%s] Expected '%s' receiver but got '%s' on alert labels=%v",
version, expectedAlert.Receiver, gotAlert.Receiver, expectedAlert.Labels)
}
if gotAlert.State != expectedAlert.State.String() {
t.Errorf("[%s] Expected state '%s' but got '%s' on alert receiver='%s' labels=%v",
version, expectedAlert.State, gotAlert.State, gotAlert.Receiver, expectedAlert.Labels)
}
if !reflect.DeepEqual(gotAlert.Annotations, expectedAlert.Annotations) {
t.Errorf("[%s] Annotation mismatch on alert receiver='%s' labels=%v, expected %v but got %v",
version, expectedAlert.Receiver, expectedAlert.Labels, expectedAlert.Annotations, gotAlert.Annotations)
}
expectedLabels := models.LabelsToOrderedLabels(expectedAlert.Labels)
if !reflect.DeepEqual(gotAlert.Labels, expectedLabels) {
t.Errorf("[%s] Labels mismatch on alert receiver='%s', expected labels=%v but got %v",
version, expectedAlert.Receiver, expectedLabels, gotAlert.Labels)
}
if len(gotAlert.Alertmanager) != len(expectedAlert.Alertmanager) {
t.Errorf("[%s] Expected %d alertmanager instances but got %d on alert receiver='%s' labels=%v",
version, len(expectedAlert.Alertmanager), len(gotAlert.Alertmanager), gotAlert.Receiver, expectedAlert.Labels)
}
for _, expectedAM := range expectedAlert.Alertmanager {
found := false
for _, gotAM := range gotAlert.Alertmanager {
if gotAM.Name == expectedAM.Name {
found = true
if gotAM.State != expectedAM.State {
t.Errorf("[%s] Expected alertmanager '%s' to have state '%s' but got '%s' on alert receiver='%s' labels=%v",
version, expectedAM.Name, expectedAM.State, gotAM.State, gotAlert.Receiver, expectedAlert.Labels)
}
if gotAM.Source != expectedAM.Source {
t.Errorf("[%s] Expected alertmanager '%s' to have source '%s' but got '%s' on alert receiver='%s' labels=%v",
version, expectedAM.Name, expectedAM.Source, gotAM.Source, gotAlert.Receiver, expectedAlert.Labels)
}
// multiple silences only work for >=0.6.1
versionRange, err := semver.NewConstraint(">=0.6.1")
if err != nil {
t.Errorf("[%s] Cannot create semver Constrain: %s", version, err)
}
if versionRange.Check(semver.MustParse(version)) {
if len(gotAM.Silences) != len(expectedAM.Silences) {
t.Errorf("[%s] Expected alertmanager '%s' to have %d silences but got %d on alert receiver='%s' labels=%v",
version, expectedAM.Name, len(expectedAM.Silences), len(gotAM.Silences), gotAlert.Receiver, expectedAlert.Labels)
}
for _, es := range expectedAM.Silences {
foundSilence := false
for _, gs := range gotAM.Silences {
if es.Comment == gs.Comment &&
es.CreatedBy == gs.CreatedBy &&
es.TicketID == gs.TicketID &&
es.TicketURL == gs.TicketURL {
foundSilence = true
}
}
if !foundSilence {
t.Errorf("[%s] Silence %v not found on alertmanager '%s' on alert receiver='%s' labels=%v",
version, es, expectedAM.Name, expectedAlert.Receiver, expectedAlert.Labels)
}
}
}
break
}
}
if !found {
t.Errorf("[%s] Alertmanager instances '%s' not found on alert receiver='%s' labels=%v",
version, expectedAM.Name, gotAlert.Receiver, expectedAlert.Labels)
}
}
}
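// testAlertGroup verifies an alert group returned by the API against a test
// case: group ID, per-state counts and every expected alert in the group.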
func testAlertGroup(version string, t *testing.T, testCase groupTest, group models.APIAlertGroup) {
if testCase.id != group.ID {
t.Errorf("[%s] Alert group.ID mismatch, expected '%s' but got '%s' for group %v",
version, testCase.id, group.ID, group.Labels)
}
for key, val := range testCase.stateCount {
v, found := group.StateCount[key]
if !found {
t.Errorf("[%s] Expected group.StateCount[%s]=%d not found", version, key, val)
} else if v != val {
t.Errorf("[%s] group.StateCount[%s] mismatch, expected %d but got %d", version, key, val, v)
}
}
if len(testCase.alerts) != len(group.Alerts) {
t.Errorf("[%s] Expected %d alert(s) but got %d on alert group receiver='%s' labels=%v",
version, len(testCase.alerts), len(group.Alerts), testCase.receiver, testCase.labels)
}
for _, expectedAlert := range testCase.alerts {
alertFound := false
for _, alert := range group.Alerts {
match := compareAlerts(expectedAlert, alert)
if match {
alertFound = true
testAlert(version, t, expectedAlert, alert)
}
}
if !alertFound {
t.Errorf("[%s] Expected alert receiver='%s' labels=%v not found in group: %v",
version, expectedAlert.Receiver, expectedAlert.Labels, group.Alerts)
}
}
}
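// TestVerifyAllGroups posts to /alerts.json for every bundled Alertmanager
// mock version and verifies that the returned alert groups, silences,
// filters, receivers and label names match the expected data.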
func TestVerifyAllGroups(t *testing.T) {
payload, err := json.Marshal(models.AlertsRequest{
Filters: []string{},
GridLimits: map[string]int{},
DefaultGroupLimit: 50,
})
if err != nil {
t.Error(err)
t.FailNow()
}
mockConfig(t.Setenv)
for _, version := range mock.ListAllMocks() {
t.Logf("Testing API using mock files from Alertmanager %s", version)
mockAlerts(version)
apiCache.Purge()
r := testRouter()
setupRouter(r, nil)
req := httptest.NewRequest("POST", "/alerts.json", bytes.NewReader(payload))
resp := httptest.NewRecorder()
r.ServeHTTP(resp, req)
if resp.Code != http.StatusOK {
t.Errorf("POST /alerts.json returned status %d", resp.Code)
}
ur := models.AlertsResponse{}
err := json.Unmarshal(resp.Body.Bytes(), &ur)
if err != nil {
t.Errorf("Failed to unmarshal response: %s", err)
}
if len(ur.Grids[0].AlertGroups) != len(groupTests) {
t.Errorf("[%s] Got %d alert(s) in response, expected %d",
version, len(ur.Grids[0].AlertGroups), len(groupTests))
}
for _, testCase := range groupTests {
groupFound := false
for _, group := range ur.Grids[0].AlertGroups {
if compareAlertGroups(testCase, group) {
groupFound = true
testAlertGroup(version, t, testCase, group)
}
}
if !groupFound {
t.Errorf("[%s] Expected alert group not found receiver='%s' labels=%v",
version, testCase.receiver, testCase.labels)
}
}
am, foundAM := ur.Silences["default"]
if !foundAM {
t.Errorf("[%s] Alertmanager cluster 'default' missing from silences", version)
for name := range ur.Silences {
t.Logf("Cluster: %s", name)
}
} else if len(am) == 0 {
t.Errorf("[%s] Silences mismatch, expected >0 but got %d", version, len(am))
}
if !reflect.DeepEqual(ur.Filters, filtersExpected) {
t.Errorf("[%s] Filters mismatch, expected %v but got %v", version, filtersExpected, ur.Filters)
}
expectedReceivers := []string{"by-cluster-service", "by-name"}
if diff := cmp.Diff(expectedReceivers, ur.Receivers); diff != "" {
t.Errorf("Incorrect receivers list (-want +got):\n%s", diff)
}
expectedLabelNames := []string{"alertname", "cluster", "disk", "instance", "ip", "job"}
if diff := cmp.Diff(expectedLabelNames, ur.LabelNames); diff != "" {
t.Errorf("Incorrect labelNames list (-want +got):\n%s", diff)
}
}
}
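// sortTest describes a single sorting test case: request filters and sort
// options plus the label whose values are expected in the given order.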
type sortTest struct {
sortOrder string
sortLabel string
expectedLabel string
filter []string
expectedValues []string
defaultSortReverse bool
sortReverse bool
}
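// sortTests covers sorting by a label (ascending and descending), disabled
// sorting and the default sort order.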
var sortTests = []sortTest{
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "label",
sortLabel: "cluster",
sortReverse: false,
expectedLabel: "cluster",
expectedValues: []string{"dev", "dev", "prod", "prod", "staging", "staging"},
},
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "label",
sortLabel: "cluster",
sortReverse: true,
expectedLabel: "cluster",
expectedValues: []string{"staging", "staging", "prod", "prod", "dev", "dev"},
},
{
filter: []string{"cluster=dev"},
sortOrder: "label",
sortLabel: "cluster",
sortReverse: false,
expectedLabel: "cluster",
expectedValues: []string{"dev", "dev", "dev", "dev"},
},
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "label",
sortLabel: "disk",
sortReverse: false,
expectedLabel: "disk",
expectedValues: []string{"sda", "", "", "", "", "", "", "", "", "", "", ""},
},
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "label",
sortLabel: "disk",
sortReverse: true,
expectedLabel: "disk",
expectedValues: []string{"", "", "", "", "", "", "", "", "", "", "", "sda"},
},
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "disabled",
sortLabel: "",
sortReverse: false,
expectedLabel: "cluster",
expectedValues: []string{"dev", "prod", "prod", "staging", "dev", "staging"},
},
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "disabled",
sortLabel: "",
sortReverse: true,
expectedLabel: "cluster",
expectedValues: []string{"staging", "dev", "staging", "prod", "prod", "dev"},
},
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "",
sortLabel: "",
sortReverse: false,
expectedLabel: "cluster",
expectedValues: []string{"dev", "dev", "prod", "prod", "staging", "staging"},
},
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "",
sortLabel: "",
sortReverse: true,
expectedLabel: "cluster",
expectedValues: []string{"staging", "staging", "prod", "prod", "dev", "dev"},
},
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "label",
sortLabel: "job",
sortReverse: false,
expectedLabel: "job",
expectedValues: []string{"node_exporter", "node_exporter", "node_exporter", "node_ping", "node_ping", "node_ping"},
},
{
filter: []string{"@receiver=by-cluster-service"},
sortOrder: "label",
sortLabel: "job",
sortReverse: true,
expectedLabel: "job",
expectedValues: []string{"node_ping", "node_ping", "node_ping", "node_exporter", "node_exporter", "node_exporter"},
},
}
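// TestSortOrder posts to /alerts.json with different sort options and
// verifies that alert groups come back in the expected order; custom sort
// values are configured for the "job" label.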
func TestSortOrder(t *testing.T) {
mockConfig(t.Setenv)
config.Config.Grid.Sorting.Order = "label"
config.Config.Grid.Sorting.Label = "cluster"
config.Config.Grid.Sorting.CustomValues.Labels = map[string]map[string]string{}
config.Config.Grid.Sorting.CustomValues.Labels["job"] = map[string]string{
"node_exporter": "1",
"node_ping": "2",
}
for _, version := range mock.ListAllMocks() {
for i := 1; i <= 3; i++ {
t.Logf("Testing API using mock files from Alertmanager %s [try %d]", version, i)
mockAlerts(version)
r := testRouter()
setupRouter(r, nil)
for _, testCase := range sortTests {
apiCache.Purge()
config.Config.Grid.Sorting.Reverse = testCase.defaultSortReverse
payload, err := json.Marshal(models.AlertsRequest{
Filters: testCase.filter,
GridLimits: map[string]int{},
SortLabel: testCase.sortLabel,
SortOrder: testCase.sortOrder,
SortReverse: testCase.sortReverse,
DefaultGroupLimit: 5,
})
if err != nil {
t.Error(err)
t.FailNow()
}
req := httptest.NewRequest("POST", "/alerts.json", bytes.NewReader(payload))
resp := httptest.NewRecorder()
r.ServeHTTP(resp, req)
if resp.Code != http.StatusOK {
t.Errorf("POST /alerts.json returned status %d", resp.Code)
}
ur := models.AlertsResponse{}
err = json.Unmarshal(resp.Body.Bytes(), &ur)
if err != nil {
t.Errorf("Failed to unmarshal response: %s", err)
}
if len(ur.Grids) == 0 {
if len(testCase.expectedValues) > 0 {
t.Errorf("Got empty grids but expected %d groups", len(testCase.expectedValues))
}
} else {
values := []string{}
for _, ag := range ur.Grids[0].AlertGroups {
v := ag.Labels.Get(testCase.expectedLabel)
if v == "" {
v = ag.Shared.Labels.Get(testCase.expectedLabel)
}
if v != "" {
values = append(values, v)
} else {
for _, alert := range ag.Alerts {
v = alert.Labels.Get(testCase.expectedLabel)
values = append(values, v)
}
}
}
if diff := cmp.Diff(testCase.expectedValues, values); diff != "" {
t.Errorf("Incorrectly sorted values for filter=%q order=%s label=%q reverse=%v (-want +got):\n%s",
strings.Join(testCase.filter, " "), testCase.sortOrder, testCase.sortLabel, testCase.sortReverse, diff)
}
}
}
}
}
}
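// verifyStrippedLabels checks that none of the stripped labels are present
// and, when a keep list is set without a strip list, that only kept labels
// remain.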
func verifyStrippedLabels(t *testing.T, ls models.OrderedLabels, keep, strip []string) {
for _, l := range strip {
if v := ls.Get(l); v != "" {
t.Errorf("Found stripped label %s=%s on %v", l, v, ls)
}
}
if len(keep) > 0 && len(strip) == 0 {
for _, ll := range ls {
ok := false
for _, l := range keep {
if ll.Name == l {
ok = true
}
}
if !ok {
t.Errorf("Found label %s=%s that's not on the keep list: %v", ll.Name, ll.Value, keep)
}
}
}
}
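// TestStripLabels posts to /alerts.json with different keep/strip label
// configurations and verifies that label filtering is applied to alert,
// group and shared labels.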
func TestStripLabels(t *testing.T) {
type testCaseT struct {
keep []string
strip []string
}
testCases := []testCaseT{
{keep: []string{}, strip: []string{}},
{keep: []string{}, strip: []string{"alertname"}},
{keep: []string{"alertname"}, strip: []string{}},
}
payload, err := json.Marshal(models.AlertsRequest{
Filters: []string{},
GridLimits: map[string]int{},
DefaultGroupLimit: 5,
})
if err != nil {
t.Error(err)
t.FailNow()
}
mockConfig(t.Setenv)
for _, version := range mock.ListAllMocks() {
t.Logf("Testing API using mock files from Alertmanager %s", version)
mockAlerts(version)
r := testRouter()
setupRouter(r, nil)
for _, testCase := range testCases {
config.Config.Labels.Keep = testCase.keep
config.Config.Labels.Strip = testCase.strip
apiCache.Purge()
req := httptest.NewRequest("POST", "/alerts.json", bytes.NewReader(payload))
resp := httptest.NewRecorder()
r.ServeHTTP(resp, req)
if resp.Code != http.StatusOK {
t.Errorf("POST /alerts.json returned status %d", resp.Code)
}
ur := models.AlertsResponse{}
err := json.Unmarshal(resp.Body.Bytes(), &ur)
if err != nil {
t.Errorf("Failed to unmarshal response: %s", err)
}
for _, grid := range ur.Grids {
for _, ag := range grid.AlertGroups {
for _, alert := range ag.Alerts {
verifyStrippedLabels(t, alert.Labels, testCase.keep, testCase.strip)
}
verifyStrippedLabels(t, ag.Labels, testCase.keep, testCase.strip)
verifyStrippedLabels(t, ag.Shared.Labels, testCase.keep, testCase.strip)
}
}
}
}
}