Merge pull request #1812 from cappyzawa/issue-856-count-metrics

Add count metrics for canary successes and failures
This commit is contained in:
Stefan Prodan
2025-10-08 09:49:27 +03:00
committed by GitHub
9 changed files with 584 additions and 35 deletions

View File

@@ -121,4 +121,10 @@ flagger_canary_duration_seconds_count{name="podinfo",namespace="test"} 6
# Last canary metric analysis result per different metrics
flagger_canary_metric_analysis{metric="podinfo-http-successful-rate",name="podinfo",namespace="test"} 1
flagger_canary_metric_analysis{metric="podinfo-custom-metric",name="podinfo",namespace="test"} 0.918223108974359
# Canary successes total counter
flagger_canary_successes_total{name="podinfo",namespace="test",deployment_strategy="canary",analysis_status="completed"} 5
# Canary failures total counter
flagger_canary_failures_total{name="podinfo",namespace="test",deployment_strategy="canary",analysis_status="completed"} 1
```

1
go.mod
View File

@@ -63,6 +63,7 @@ require (
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect

View File

@@ -35,6 +35,13 @@ const (
MetricInterval = "1m"
)
// Deployment strategies returned by Canary.DeploymentStrategy.
// These values are also recorded as the deployment_strategy label on the
// canary success/failure counters.
const (
DeploymentStrategyCanary = "canary"
DeploymentStrategyBlueGreen = "blue-green"
DeploymentStrategyABTesting = "ab-testing"
)
// +genclient
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
@@ -640,3 +647,24 @@ func (c *Canary) SkipAnalysis() bool {
}
return c.Spec.SkipAnalysis
}
// DeploymentStrategy returns the deployment strategy derived from the canary
// analysis configuration: ab-testing when both match conditions and
// iterations are set, blue-green when only iterations are set, and the
// default canary release otherwise (including a nil analysis).
func (c *Canary) DeploymentStrategy() string {
	analysis := c.GetAnalysis()
	switch {
	case analysis == nil:
		return DeploymentStrategyCanary
	case analysis.Iterations > 0 && len(analysis.Match) > 0:
		return DeploymentStrategyABTesting
	case analysis.Iterations > 0:
		return DeploymentStrategyBlueGreen
	default:
		return DeploymentStrategyCanary
	}
}

View File

@@ -0,0 +1,94 @@
/*
Copyright 2025 The Flux authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1beta1
import (
"testing"
istiov1alpha1 "github.com/fluxcd/flagger/pkg/apis/istio/common/v1alpha1"
istiov1beta1 "github.com/fluxcd/flagger/pkg/apis/istio/v1beta1"
"github.com/stretchr/testify/assert"
)
// TestCanary_GetDeploymentStrategy verifies that Canary.DeploymentStrategy
// classifies the analysis configuration into the canary, blue-green, or
// ab-testing strategy, defaulting to canary for nil or empty analysis.
func TestCanary_GetDeploymentStrategy(t *testing.T) {
	cases := []struct {
		name     string
		analysis *CanaryAnalysis
		expected string
	}{
		{
			name:     "canary strategy with maxWeight",
			analysis: &CanaryAnalysis{MaxWeight: 30, StepWeight: 10},
			expected: DeploymentStrategyCanary,
		},
		{
			name:     "canary strategy with stepWeights",
			analysis: &CanaryAnalysis{StepWeights: []int{10, 20, 30}},
			expected: DeploymentStrategyCanary,
		},
		{
			name:     "blue-green strategy with iterations",
			analysis: &CanaryAnalysis{Iterations: 5},
			expected: DeploymentStrategyBlueGreen,
		},
		{
			name: "ab-testing strategy with iterations and match",
			analysis: &CanaryAnalysis{
				Iterations: 10,
				Match: []istiov1beta1.HTTPMatchRequest{
					{
						Headers: map[string]istiov1alpha1.StringMatch{
							"x-canary": {Exact: "insider"},
						},
					},
				},
			},
			expected: DeploymentStrategyABTesting,
		},
		{
			name:     "default to canary when analysis is nil",
			analysis: nil,
			expected: DeploymentStrategyCanary,
		},
		{
			name:     "default to canary when analysis is empty",
			analysis: &CanaryAnalysis{},
			expected: DeploymentStrategyCanary,
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			c := &Canary{Spec: CanarySpec{Analysis: tc.analysis}}
			assert.Equal(t, tc.expected, c.DeploymentStrategy())
		})
	}
}

View File

@@ -209,30 +209,35 @@ func alertMetadata(canary *flaggerv1.Canary) []notifier.Field {
},
)
if canary.GetAnalysis().StepWeight > 0 {
fields = append(fields, notifier.Field{
Name: "Traffic routing",
Value: fmt.Sprintf("Weight step: %v max: %v",
canary.GetAnalysis().StepWeight,
canary.GetAnalysis().MaxWeight),
})
} else if len(canary.GetAnalysis().StepWeights) > 0 {
fields = append(fields, notifier.Field{
Name: "Traffic routing",
Value: fmt.Sprintf("Weight steps: %s max: %v",
strings.Trim(strings.Join(strings.Fields(fmt.Sprint(canary.GetAnalysis().StepWeights)), ","), "[]"),
canary.GetAnalysis().MaxWeight),
})
} else if len(canary.GetAnalysis().Match) > 0 {
strategy := canary.DeploymentStrategy()
switch strategy {
case flaggerv1.DeploymentStrategyABTesting:
fields = append(fields, notifier.Field{
Name: "Traffic routing",
Value: "A/B Testing",
})
} else if canary.GetAnalysis().Iterations > 0 {
case flaggerv1.DeploymentStrategyBlueGreen:
fields = append(fields, notifier.Field{
Name: "Traffic routing",
Value: "Blue/Green",
})
default:
// Canary strategy
if canary.GetAnalysis().StepWeight > 0 {
fields = append(fields, notifier.Field{
Name: "Traffic routing",
Value: fmt.Sprintf("Weight step: %v max: %v",
canary.GetAnalysis().StepWeight,
canary.GetAnalysis().MaxWeight),
})
} else if len(canary.GetAnalysis().StepWeights) > 0 {
fields = append(fields, notifier.Field{
Name: "Traffic routing",
Value: fmt.Sprintf("Weight steps: %s max: %v",
strings.Trim(strings.Join(strings.Fields(fmt.Sprint(canary.GetAnalysis().StepWeights)), ","), "[]"),
canary.GetAnalysis().MaxWeight),
})
}
}
return fields
}

View File

@@ -27,6 +27,7 @@ import (
flaggerv1 "github.com/fluxcd/flagger/pkg/apis/flagger/v1beta1"
"github.com/fluxcd/flagger/pkg/canary"
"github.com/fluxcd/flagger/pkg/metrics"
"github.com/fluxcd/flagger/pkg/router"
)
@@ -400,6 +401,12 @@ func (c *Controller) advanceCanary(name string, namespace string) {
return
}
c.recorder.SetStatus(cd, flaggerv1.CanaryPhaseSucceeded)
c.recorder.IncSuccesses(metrics.CanaryMetricLabels{
Name: cd.Spec.TargetRef.Name,
Namespace: cd.Namespace,
DeploymentStrategy: cd.DeploymentStrategy(),
AnalysisStatus: metrics.AnalysisStatusCompleted,
})
c.runPostRolloutHooks(cd, flaggerv1.CanaryPhaseSucceeded)
c.recordEventInfof(cd, "Promotion completed! Scaling down %s.%s", cd.Spec.TargetRef.Name, cd.Namespace)
c.alert(cd, "Canary analysis completed successfully, promotion finished.",
@@ -460,14 +467,12 @@ func (c *Controller) advanceCanary(name string, namespace string) {
}
}
// strategy: A/B testing
if len(cd.GetAnalysis().Match) > 0 && cd.GetAnalysis().Iterations > 0 {
strategy := cd.DeploymentStrategy()
switch strategy {
case flaggerv1.DeploymentStrategyABTesting:
c.runAB(cd, canaryController, meshRouter)
return
}
// strategy: Blue/Green
if cd.GetAnalysis().Iterations > 0 {
case flaggerv1.DeploymentStrategyBlueGreen:
c.runBlueGreen(cd, canaryController, meshRouter, provider, mirrored)
return
}
@@ -814,6 +819,12 @@ func (c *Controller) shouldSkipAnalysis(canary *flaggerv1.Canary, canaryControll
// notify
c.recorder.SetStatus(canary, flaggerv1.CanaryPhaseSucceeded)
c.recorder.IncSuccesses(metrics.CanaryMetricLabels{
Name: canary.Spec.TargetRef.Name,
Namespace: canary.Namespace,
DeploymentStrategy: canary.DeploymentStrategy(),
AnalysisStatus: metrics.AnalysisStatusSkipped,
})
c.recordEventInfof(canary, "Promotion completed! Canary analysis was skipped for %s.%s",
canary.Spec.TargetRef.Name, canary.Namespace)
c.alert(canary, "Canary analysis was skipped, promotion finished.",
@@ -961,6 +972,12 @@ func (c *Controller) rollback(canary *flaggerv1.Canary, canaryController canary.
}
c.recorder.SetStatus(canary, flaggerv1.CanaryPhaseFailed)
c.recorder.IncFailures(metrics.CanaryMetricLabels{
Name: canary.Spec.TargetRef.Name,
Namespace: canary.Namespace,
DeploymentStrategy: canary.DeploymentStrategy(),
AnalysisStatus: metrics.AnalysisStatusCompleted,
})
c.runPostRolloutHooks(canary, flaggerv1.CanaryPhaseFailed)
}

View File

@@ -17,16 +17,18 @@ limitations under the License.
package controller
import (
"context"
"testing"
"github.com/prometheus/client_golang/prometheus/testutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/tools/record"
flaggerv1 "github.com/fluxcd/flagger/pkg/apis/flagger/v1beta1"
"github.com/fluxcd/flagger/pkg/metrics/observers"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
func TestController_checkMetricProviderAvailability(t *testing.T) {
@@ -183,3 +185,172 @@ func TestController_runMetricChecks(t *testing.T) {
assert.Equal(t, true, ctrl.runMetricChecks(canary))
})
}
// TestController_MetricsStateTransition drives a canary through the
// promotion, rollback, and skipped-analysis flows and verifies the status
// and weight gauges alongside the new success/failure counters.
//
// NOTE(review): the advanceCanary call sequence mirrors the fixture's state
// machine; reordering calls changes which phase each assertion observes.
func TestController_MetricsStateTransition(t *testing.T) {
t.Run("successful canary promotion with count metrics", func(t *testing.T) {
mocks := newDeploymentFixture(nil)
// Initialize the canary, then let the primary workload become ready.
mocks.ctrl.advanceCanary("podinfo", "default")
mocks.makePrimaryReady(t)
mocks.ctrl.advanceCanary("podinfo", "default")
// Status gauge reads 1 here — presumably the initialized phase; confirm
// against the recorder's phase-to-value mapping.
actualStatus := testutil.ToFloat64(mocks.ctrl.recorder.GetStatusMetric().WithLabelValues("podinfo", "default"))
assert.Equal(t, float64(1), actualStatus)
// NOTE(review): >= 0 holds for any gauge value; this only proves the
// total metric exists for the namespace, not a specific count.
actualTotal := testutil.ToFloat64(mocks.ctrl.recorder.GetTotalMetric().WithLabelValues("default"))
assert.GreaterOrEqual(t, actualTotal, float64(0))
// Push a new deployment revision to kick off the canary analysis.
dep2 := newDeploymentTestDeploymentV2()
_, err := mocks.kubeClient.AppsV1().Deployments("default").Update(context.TODO(), dep2, metav1.UpdateOptions{})
require.NoError(t, err)
mocks.ctrl.advanceCanary("podinfo", "default")
mocks.makeCanaryReady(t)
mocks.ctrl.advanceCanary("podinfo", "default")
// Status gauge reads 0 while the canary is progressing — presumably the
// progressing phase; confirm against the recorder's mapping.
actualStatus = testutil.ToFloat64(mocks.ctrl.recorder.GetStatusMetric().WithLabelValues("podinfo", "default"))
assert.Equal(t, float64(0), actualStatus)
// During progression the primary and canary weights should roughly sum
// to 100, with the canary within its configured step range.
actualPrimaryWeight := testutil.ToFloat64(mocks.ctrl.recorder.GetWeightMetric().WithLabelValues("podinfo-primary", "default"))
actualCanaryWeight := testutil.ToFloat64(mocks.ctrl.recorder.GetWeightMetric().WithLabelValues("podinfo", "default"))
t.Logf("Progression weights - Primary: %f, Canary: %f", actualPrimaryWeight, actualCanaryWeight)
assert.GreaterOrEqual(t, actualPrimaryWeight, float64(50))
assert.GreaterOrEqual(t, actualCanaryWeight, float64(10))
assert.LessOrEqual(t, actualPrimaryWeight, float64(100))
assert.LessOrEqual(t, actualCanaryWeight, float64(50))
totalWeight := actualPrimaryWeight + actualCanaryWeight
assert.InDelta(t, 100.0, totalWeight, 1.0)
// Keep advancing until the canary reaches the Succeeded phase.
const maxAdvanceAttempts = 10 // sufficient attempts to reach canary completion
for range maxAdvanceAttempts {
mocks.ctrl.advanceCanary("podinfo", "default")
c, err := mocks.flaggerClient.FlaggerV1beta1().Canaries("default").Get(context.TODO(), "podinfo", metav1.GetOptions{})
require.NoError(t, err)
if c.Status.Phase == flaggerv1.CanaryPhaseSucceeded {
break
}
}
// A completed promotion increments the successes counter exactly once,
// labelled with strategy "canary" and analysis status "completed".
successCount := testutil.ToFloat64(mocks.ctrl.recorder.GetSuccessesMetric().WithLabelValues("podinfo", "default", "canary", "completed"))
assert.Equal(t, float64(1), successCount)
})
t.Run("failed canary rollback with count metrics", func(t *testing.T) {
mocks := newDeploymentFixture(nil)
mocks.ctrl.advanceCanary("podinfo", "default")
mocks.makePrimaryReady(t)
mocks.ctrl.advanceCanary("podinfo", "default")
// Force the canary into a progressing state with failed checks so the
// next advances trigger a rollback.
err := mocks.deployer.SyncStatus(mocks.canary, flaggerv1.CanaryStatus{
Phase: flaggerv1.CanaryPhaseProgressing,
FailedChecks: 10,
})
require.NoError(t, err)
c, err := mocks.flaggerClient.FlaggerV1beta1().Canaries("default").Get(context.TODO(), "podinfo", metav1.GetOptions{})
require.NoError(t, err)
// Add a metric whose query is designed to fail the analysis.
cd := c.DeepCopy()
cd.Spec.Analysis.Metrics = append(c.Spec.Analysis.Metrics, flaggerv1.CanaryMetric{
Name: "fail",
Interval: "1m",
ThresholdRange: &flaggerv1.CanaryThresholdRange{
Min: toFloatPtr(0),
Max: toFloatPtr(50),
},
Query: "fail",
})
_, err = mocks.flaggerClient.FlaggerV1beta1().Canaries("default").Update(context.TODO(), cd, metav1.UpdateOptions{})
require.NoError(t, err)
mocks.ctrl.advanceCanary("podinfo", "default")
mocks.ctrl.advanceCanary("podinfo", "default")
// Status gauge reads 2 here — presumably the failed phase; confirm
// against the recorder's phase-to-value mapping.
actualStatus := testutil.ToFloat64(mocks.ctrl.recorder.GetStatusMetric().WithLabelValues("podinfo", "default"))
assert.Equal(t, float64(2), actualStatus)
// After rollback all traffic is routed back to the primary.
actualPrimaryWeight := testutil.ToFloat64(mocks.ctrl.recorder.GetWeightMetric().WithLabelValues("podinfo-primary", "default"))
actualCanaryWeight := testutil.ToFloat64(mocks.ctrl.recorder.GetWeightMetric().WithLabelValues("podinfo", "default"))
assert.Equal(t, float64(100), actualPrimaryWeight)
assert.Equal(t, float64(0), actualCanaryWeight)
// A rollback increments the failures counter exactly once.
failureCount := testutil.ToFloat64(mocks.ctrl.recorder.GetFailuresMetric().WithLabelValues("podinfo", "default", "canary", "completed"))
assert.Equal(t, float64(1), failureCount)
})
t.Run("skipped analysis with count metrics", func(t *testing.T) {
mocks := newDeploymentFixture(nil)
mocks.ctrl.advanceCanary("podinfo", "default")
mocks.makePrimaryReady(t)
mocks.ctrl.advanceCanary("podinfo", "default")
// Enable skip analysis
cd, err := mocks.flaggerClient.FlaggerV1beta1().Canaries("default").Get(context.TODO(), "podinfo", metav1.GetOptions{})
require.NoError(t, err)
cd.Spec.SkipAnalysis = true
_, err = mocks.flaggerClient.FlaggerV1beta1().Canaries("default").Update(context.TODO(), cd, metav1.UpdateOptions{})
require.NoError(t, err)
// Trigger deployment update
dep2 := newDeploymentTestDeploymentV2()
_, err = mocks.kubeClient.AppsV1().Deployments("default").Update(context.TODO(), dep2, metav1.UpdateOptions{})
require.NoError(t, err)
// Skip analysis should succeed immediately
mocks.ctrl.advanceCanary("podinfo", "default")
mocks.makeCanaryReady(t)
mocks.ctrl.advanceCanary("podinfo", "default")
// The skipped promotion is counted with analysis_status="skipped".
successCount := testutil.ToFloat64(mocks.ctrl.recorder.GetSuccessesMetric().WithLabelValues("podinfo", "default", "canary", "skipped"))
assert.Equal(t, float64(1), successCount)
})
}
// TestController_AnalysisMetricsRecording checks that running the builtin
// metric checks records an analysis gauge entry per metric name.
func TestController_AnalysisMetricsRecording(t *testing.T) {
	t.Run("builtin metrics analysis recording", func(t *testing.T) {
		mocks := newDeploymentFixture(nil)

		canary := &flaggerv1.Canary{
			ObjectMeta: metav1.ObjectMeta{
				Name:      "podinfo",
				Namespace: "default",
			},
			Spec: flaggerv1.CanarySpec{
				TargetRef: flaggerv1.LocalObjectReference{
					Name: "podinfo",
				},
				Analysis: &flaggerv1.CanaryAnalysis{
					Metrics: []flaggerv1.CanaryMetric{
						{
							Name: "request-success-rate",
							ThresholdRange: &flaggerv1.CanaryThresholdRange{
								Min: toFloatPtr(99),
								Max: toFloatPtr(100),
							},
						},
						{
							Name: "request-duration",
							ThresholdRange: &flaggerv1.CanaryThresholdRange{
								Min: toFloatPtr(0),
								Max: toFloatPtr(500),
							},
						},
					},
				},
			},
		}

		assert.True(t, mocks.ctrl.runMetricChecks(canary))

		// Both builtin metrics should now be present in the analysis gauge.
		for _, metricName := range []string{"request-success-rate", "request-duration"} {
			m := mocks.ctrl.recorder.GetAnalysisMetric().WithLabelValues("podinfo", "default", metricName)
			assert.NotNil(t, m)
		}
	})
}

View File

@@ -24,14 +24,42 @@ import (
"github.com/prometheus/client_golang/prometheus"
)
// Deployment strategies
//
// NOTE(review): these values mirror the DeploymentStrategy* constants in the
// flagger v1beta1 API package; consider reusing those to avoid the two sets
// drifting apart.
const (
CanaryStrategy = "canary"
BlueGreenStrategy = "blue-green"
ABTestingStrategy = "ab-testing"
)
// Analysis status values recorded in the analysis_status label of the
// canary success/failure counters.
const (
AnalysisStatusCompleted = "completed"
AnalysisStatusSkipped = "skipped"
)
// CanaryMetricLabels bundles the label values attached to the canary
// success/failure counters.
type CanaryMetricLabels struct {
	Name               string
	Namespace          string
	DeploymentStrategy string
	AnalysisStatus     string
}

// Values returns the label values ordered to match the label names declared
// on the counter vectors: name, namespace, deployment_strategy, analysis_status.
func (l CanaryMetricLabels) Values() []string {
	return []string{
		l.Name,
		l.Namespace,
		l.DeploymentStrategy,
		l.AnalysisStatus,
	}
}
// Recorder records the canary analysis as Prometheus metrics
type Recorder struct {
info *prometheus.GaugeVec
duration *prometheus.HistogramVec
total *prometheus.GaugeVec
status *prometheus.GaugeVec
weight *prometheus.GaugeVec
analysis *prometheus.GaugeVec
info *prometheus.GaugeVec
duration *prometheus.HistogramVec
total *prometheus.GaugeVec
status *prometheus.GaugeVec
weight *prometheus.GaugeVec
analysis *prometheus.GaugeVec
successes *prometheus.CounterVec
failures *prometheus.CounterVec
}
// NewRecorder creates a new recorder and registers the Prometheus metrics
@@ -74,6 +102,18 @@ func NewRecorder(controller string, register bool) Recorder {
Help: "Last canary analysis result per metric",
}, []string{"name", "namespace", "metric"})
successes := prometheus.NewCounterVec(prometheus.CounterOpts{
Subsystem: controller,
Name: "canary_successes_total",
Help: "Total number of canary successes",
}, []string{"name", "namespace", "deployment_strategy", "analysis_status"})
failures := prometheus.NewCounterVec(prometheus.CounterOpts{
Subsystem: controller,
Name: "canary_failures_total",
Help: "Total number of canary failures",
}, []string{"name", "namespace", "deployment_strategy", "analysis_status"})
if register {
prometheus.MustRegister(info)
prometheus.MustRegister(duration)
@@ -81,15 +121,19 @@ func NewRecorder(controller string, register bool) Recorder {
prometheus.MustRegister(status)
prometheus.MustRegister(weight)
prometheus.MustRegister(analysis)
prometheus.MustRegister(successes)
prometheus.MustRegister(failures)
}
return Recorder{
info: info,
duration: duration,
total: total,
status: status,
weight: weight,
analysis: analysis,
info: info,
duration: duration,
total: total,
status: status,
weight: weight,
analysis: analysis,
successes: successes,
failures: failures,
}
}
@@ -131,3 +175,53 @@ func (cr *Recorder) SetWeight(cd *flaggerv1.Canary, primary int, canary int) {
cr.weight.WithLabelValues(fmt.Sprintf("%s-primary", cd.Spec.TargetRef.Name), cd.Namespace).Set(float64(primary))
cr.weight.WithLabelValues(cd.Spec.TargetRef.Name, cd.Namespace).Set(float64(canary))
}
// IncSuccesses increments the canary successes counter for the given label set.
func (cr *Recorder) IncSuccesses(labels CanaryMetricLabels) {
	counter := cr.successes.WithLabelValues(labels.Values()...)
	counter.Inc()
}

// IncFailures increments the canary failures counter for the given label set.
func (cr *Recorder) IncFailures(labels CanaryMetricLabels) {
	counter := cr.failures.WithLabelValues(labels.Values()...)
	counter.Inc()
}
// GetStatusMetric returns the gauge tracking the last canary phase per
// name/namespace. The Get* accessors below exist so tests can inspect the
// recorded values directly.
//
// NOTE(review): Go style normally omits the Get prefix on accessors; kept
// here because the names are already exported API.
func (cr *Recorder) GetStatusMetric() *prometheus.GaugeVec {
return cr.status
}
// GetWeightMetric returns the gauge tracking traffic weight per workload.
func (cr *Recorder) GetWeightMetric() *prometheus.GaugeVec {
return cr.weight
}
// GetTotalMetric returns the gauge tracking the number of canaries per namespace.
func (cr *Recorder) GetTotalMetric() *prometheus.GaugeVec {
return cr.total
}
// GetInfoMetric returns the controller version/mesh-provider info gauge.
func (cr *Recorder) GetInfoMetric() *prometheus.GaugeVec {
return cr.info
}
// GetDurationMetric returns the canary analysis duration histogram.
func (cr *Recorder) GetDurationMetric() *prometheus.HistogramVec {
return cr.duration
}
// GetAnalysisMetric returns the gauge holding the last analysis result per metric.
func (cr *Recorder) GetAnalysisMetric() *prometheus.GaugeVec {
return cr.analysis
}
// GetSuccessesMetric returns the canary successes counter vector.
func (cr *Recorder) GetSuccessesMetric() *prometheus.CounterVec {
return cr.successes
}
// GetFailuresMetric returns the canary failures counter vector.
func (cr *Recorder) GetFailuresMetric() *prometheus.CounterVec {
return cr.failures
}

View File

@@ -0,0 +1,133 @@
/*
Copyright 2025 The Flux authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package metrics
import (
"testing"
"time"
flaggerv1 "github.com/fluxcd/flagger/pkg/apis/flagger/v1beta1"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/testutil"
"github.com/stretchr/testify/assert"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// TestRecorder_GetterMethodsWithData exercises each Recorder setter together
// with its matching getter, verifying that values recorded through the
// Set*/Inc* methods are readable back through the exported metric vectors.
func TestRecorder_GetterMethodsWithData(t *testing.T) {
	// The recorder is unregistered (register=false) so the test does not
	// touch the default Prometheus registry. It is shared across all cases,
	// so the counter expectations assume each Inc* setup runs exactly once.
	recorder := NewRecorder("test", false)

	canary := &flaggerv1.Canary{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "podinfo",
			Namespace: "default",
		},
		Spec: flaggerv1.CanarySpec{
			TargetRef: flaggerv1.LocalObjectReference{
				Name: "podinfo",
			},
		},
	}

	tests := []struct {
		name       string
		setupFunc  func(Recorder)
		getterFunc func(Recorder) any
		labels     []string
		expected   float64
		checkValue bool
	}{
		{
			name:       "SetAndGetInfo",
			setupFunc:  func(r Recorder) { r.SetInfo("v1.0.0", "istio") },
			getterFunc: func(r Recorder) any { return r.GetInfoMetric() },
			labels:     []string{"v1.0.0", "istio"},
			expected:   1.0,
			checkValue: true,
		},
		{
			name:       "SetAndGetStatus",
			setupFunc:  func(r Recorder) { r.SetStatus(canary, flaggerv1.CanaryPhaseSucceeded) },
			getterFunc: func(r Recorder) any { return r.GetStatusMetric() },
			labels:     []string{"podinfo", "default"},
			expected:   1.0,
			checkValue: true,
		},
		{
			name:       "SetAndGetTotal",
			setupFunc:  func(r Recorder) { r.SetTotal("default", 3) },
			getterFunc: func(r Recorder) any { return r.GetTotalMetric() },
			labels:     []string{"default"},
			expected:   3.0,
			checkValue: true,
		},
		{
			name:       "SetAndGetDuration",
			setupFunc:  func(r Recorder) { r.SetDuration(canary, time.Second*5) },
			getterFunc: func(r Recorder) any { return r.GetDurationMetric() },
			labels:     nil,
			expected:   0,
			checkValue: false, // Histogram values can't be easily checked with testutil
		},
		{
			name:       "SetAndGetAnalysis",
			setupFunc:  func(r Recorder) { r.SetAnalysis(canary, "request-success-rate", 99.5) },
			getterFunc: func(r Recorder) any { return r.GetAnalysisMetric() },
			labels:     []string{"podinfo", "default", "request-success-rate"},
			expected:   99.5,
			checkValue: true,
		},
		{
			name: "IncAndGetSuccesses",
			setupFunc: func(r Recorder) {
				r.IncSuccesses(CanaryMetricLabels{Name: "podinfo", Namespace: "default", DeploymentStrategy: "canary", AnalysisStatus: "completed"})
			},
			getterFunc: func(r Recorder) any { return r.GetSuccessesMetric() },
			labels:     []string{"podinfo", "default", "canary", "completed"},
			expected:   1.0,
			checkValue: true,
		},
		{
			name: "IncAndGetFailures",
			setupFunc: func(r Recorder) {
				r.IncFailures(CanaryMetricLabels{Name: "podinfo", Namespace: "default", DeploymentStrategy: "canary", AnalysisStatus: "completed"})
			},
			getterFunc: func(r Recorder) any { return r.GetFailuresMetric() },
			labels:     []string{"podinfo", "default", "canary", "completed"},
			expected:   1.0,
			checkValue: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.setupFunc(recorder)
			metric := tt.getterFunc(recorder)
			assert.NotNil(t, metric)
			if !tt.checkValue {
				return
			}
			// Gauge and counter vectors both expose a scalar via testutil.
			switch vec := metric.(type) {
			case *prometheus.GaugeVec:
				assert.Equal(t, tt.expected, testutil.ToFloat64(vec.WithLabelValues(tt.labels...)))
			case *prometheus.CounterVec:
				assert.Equal(t, tt.expected, testutil.ToFloat64(vec.WithLabelValues(tt.labels...)))
			}
		})
	}
}