Skip to content

Commit 70ca232

Browse files
author
Matt Bargenquast
committed
OSD-15393 Further RHOBS webhook implementation and tests added
1 parent a13c183 commit 70ca232

File tree

9 files changed

+255
-107
lines changed

9 files changed

+255
-107
lines changed

go.mod

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ require (
99
github.com/onsi/gomega v1.18.1
1010
github.com/openshift-online/ocm-cli v0.1.62
1111
github.com/openshift-online/ocm-sdk-go v0.1.242
12-
github.com/openshift/ocm-agent-operator v0.0.0-20230320111925-223f3f9c452f
12+
github.com/openshift/ocm-agent-operator v0.0.0-20230321042834-cd46726d7f2c
1313
github.com/prometheus/alertmanager v0.23.0
1414
github.com/prometheus/client_golang v1.12.1
1515
github.com/sirupsen/logrus v1.8.1

go.sum

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -876,8 +876,8 @@ github.com/openshift-online/ocm-sdk-go v0.1.242 h1:fesCAv8tg2gcVAV1aKwCoQrkRmDq+
876876
github.com/openshift-online/ocm-sdk-go v0.1.242/go.mod h1:nHYjrmvR9L7KSRZukysau62iEHKJOQYFhlceqaVAV1Y=
877877
github.com/openshift/api v0.0.0-20220414050251-a83e6f8f1d50/go.mod h1:F/eU6jgr6Q2VhMu1mSpMmygxAELd7+BUxs3NHZ25jV4=
878878
github.com/openshift/build-machinery-go v0.0.0-20211213093930-7e33a7eb4ce3/go.mod h1:b1BuldmJlbA/xYtdZvKi+7j5YGB44qJUJDZ9zwiNCfE=
879-
github.com/openshift/ocm-agent-operator v0.0.0-20230320111925-223f3f9c452f h1:pIg+VSi5phr9GwcsBG3hkp9FD7V/lu3fEPzB/bynw/4=
880-
github.com/openshift/ocm-agent-operator v0.0.0-20230320111925-223f3f9c452f/go.mod h1:BxgryWN2dB1dJvw3aL/8HsbksL2Ghndoi81V4+Dy80Q=
879+
github.com/openshift/ocm-agent-operator v0.0.0-20230321042834-cd46726d7f2c h1:/aymS8doHa12fbSp2f214i/ugwxCPv6fFZjqfXPMams=
880+
github.com/openshift/ocm-agent-operator v0.0.0-20230321042834-cd46726d7f2c/go.mod h1:BxgryWN2dB1dJvw3aL/8HsbksL2Ghndoi81V4+Dy80Q=
881881
github.com/openshift/operator-custom-metrics v0.4.3-0.20220322205053-7b528cc0d6eb/go.mod h1:ttINLaaHI1UhCW8j282QsUeDPb3qbkudMBtbHPntBM4=
882882
github.com/openshift/rosa v1.1.7/go.mod h1:Ap4vfflxHXfzY0MIt1l9dckwG+DkPUDc/vv1mk1cSCQ=
883883
github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis=

pkg/consts/test/test.go

Lines changed: 56 additions & 71 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,6 @@ import (
44
"context"
55
"time"
66

7-
"github.com/openshift/ocm-agent/pkg/ocm"
8-
97
"github.com/prometheus/alertmanager/template"
108
corev1 "k8s.io/api/core/v1"
119
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
@@ -14,6 +12,7 @@ import (
1412
clientgoscheme "k8s.io/client-go/kubernetes/scheme"
1513

1614
ocmagentv1alpha1 "github.com/openshift/ocm-agent-operator/api/v1alpha1"
15+
"github.com/openshift/ocm-agent/pkg/ocm"
1716
)
1817

1918
const (
@@ -59,29 +58,6 @@ var (
5958
},
6059
},
6160
}
62-
TestFleetNotification = ocmagentv1alpha1.FleetNotification{
63-
Name: TestNotificationName,
64-
Summary: "test-summary",
65-
NotificationMessage: "test-notification",
66-
Severity: "test-severity",
67-
ResendWait: 1,
68-
}
69-
TestManagedFleetNotification = ocmagentv1alpha1.ManagedFleetNotification{
70-
ObjectMeta: metav1.ObjectMeta{
71-
Name: "test-mfn",
72-
Namespace: "openshift-ocm-agent-operator",
73-
},
74-
Spec: ocmagentv1alpha1.ManagedFleetNotificationSpec{
75-
FleetNotification: TestFleetNotification,
76-
},
77-
}
78-
TestManagedFleetNotificationRecord = ocmagentv1alpha1.ManagedFleetNotificationRecord{
79-
ObjectMeta: metav1.ObjectMeta{
80-
Name: TestManagedClusterID,
81-
Namespace: "openshift-ocm-agent-operator",
82-
},
83-
Status: ocmagentv1alpha1.ManagedFleetNotificationRecordStatus{},
84-
}
8561
TestManagedNotification = ocmagentv1alpha1.ManagedNotification{
8662
ObjectMeta: metav1.ObjectMeta{
8763
Name: "test-mn",
@@ -105,46 +81,54 @@ var (
10581
Notifications: []ocmagentv1alpha1.Notification{TestNotification},
10682
},
10783
}
108-
TestAlert = template.Alert{
109-
Status: "firing",
110-
Labels: map[string]string{
111-
"managed_notification_template": TestNotificationName,
112-
"send_managed_notification": "true",
113-
"alertname": "TestAlertName",
114-
"alertstate": "firing",
115-
"namespace": "openshift-monitoring",
116-
"openshift_io_alert_source": "platform",
117-
"prometheus": "openshift-monitoring/k8s",
118-
"severity": "info",
84+
TestManagedNotificationList = &ocmagentv1alpha1.ManagedNotificationList{
85+
Items: []ocmagentv1alpha1.ManagedNotification{
86+
TestManagedNotification,
11987
},
120-
StartsAt: time.Now(),
121-
EndsAt: time.Time{},
12288
}
123-
TestFleetAlert = template.Alert{
124-
Status: "firing",
125-
Labels: map[string]string{
126-
"managed_notification_template": TestNotificationName,
127-
"send_managed_notification": "true",
128-
"alertname": "TestAlertName",
129-
"alertstate": "firing",
130-
"namespace": "openshift-monitoring",
131-
"openshift_io_alert_source": "platform",
132-
"prometheus": "openshift-monitoring/k8s",
133-
"severity": "info",
134-
"source": "HCP",
135-
"_mc_id": TestManagedClusterID,
136-
"_id": TestHostedClusterID,
89+
)
90+
91+
func NewFleetNotification() ocmagentv1alpha1.FleetNotification {
92+
return ocmagentv1alpha1.FleetNotification{
93+
Name: TestNotificationName,
94+
Summary: "test-summary",
95+
NotificationMessage: "test-notification",
96+
Severity: "test-severity",
97+
ResendWait: 1,
98+
}
99+
}
100+
101+
func NewManagedFleetNotification() ocmagentv1alpha1.ManagedFleetNotification {
102+
return ocmagentv1alpha1.ManagedFleetNotification{
103+
ObjectMeta: metav1.ObjectMeta{
104+
Name: "test-mfn",
105+
Namespace: "openshift-ocm-agent-operator",
106+
},
107+
Spec: ocmagentv1alpha1.ManagedFleetNotificationSpec{
108+
FleetNotification: NewFleetNotification(),
109+
},
110+
}
111+
}
112+
113+
func NewManagedFleetNotificationRecord() ocmagentv1alpha1.ManagedFleetNotificationRecord {
114+
return ocmagentv1alpha1.ManagedFleetNotificationRecord{
115+
ObjectMeta: metav1.ObjectMeta{
116+
Name: TestManagedClusterID,
117+
Namespace: "openshift-ocm-agent-operator",
118+
},
119+
Status: ocmagentv1alpha1.ManagedFleetNotificationRecordStatus{
120+
ManagementCluster: TestManagedClusterID,
121+
NotificationRecordByName: nil,
137122
},
138-
StartsAt: time.Now(),
139-
EndsAt: time.Time{},
140123
}
141-
TestAlertResolved = template.Alert{
142-
Status: "resolved",
124+
}
125+
126+
func NewTestAlert(resolved, fleet bool) template.Alert {
127+
alert := template.Alert{
143128
Labels: map[string]string{
144129
"managed_notification_template": TestNotificationName,
145130
"send_managed_notification": "true",
146131
"alertname": "TestAlertName",
147-
"alertstate": "resolved",
148132
"namespace": "openshift-monitoring",
149133
"openshift_io_alert_source": "platform",
150134
"prometheus": "openshift-monitoring/k8s",
@@ -153,26 +137,27 @@ var (
153137
StartsAt: time.Now(),
154138
EndsAt: time.Time{},
155139
}
156-
TestManagedNotificationList = &ocmagentv1alpha1.ManagedNotificationList{
157-
Items: []ocmagentv1alpha1.ManagedNotification{
158-
TestManagedNotification,
159-
},
140+
if resolved {
141+
alert.Status = "resolved"
142+
alert.Labels["alertstate"] = "resolved"
160143
}
161-
TestActiveServiceLog = ocm.ServiceLog{
162-
ServiceName: "SREManualAction",
163-
ClusterUUID: "ddb5e04c-87ea-4fcd-b1f9-640981726cc5",
164-
Summary: "Test SL Summary",
165-
InternalOnly: false,
166-
Description: TestNotification.ActiveDesc,
144+
if fleet {
145+
alert.Labels["source"] = "HCP"
146+
alert.Labels["_mc_id"] = TestManagedClusterID
147+
alert.Labels["_id"] = TestHostedClusterID
167148
}
168-
TestResolvedServiceLog = ocm.ServiceLog{
149+
return alert
150+
}
151+
152+
func NewServiceLog(summary, desc string) ocm.ServiceLog {
153+
return ocm.ServiceLog{
169154
ServiceName: "SREManualAction",
170155
ClusterUUID: "ddb5e04c-87ea-4fcd-b1f9-640981726cc5",
171-
Summary: "Test SL Summary",
156+
Summary: summary,
172157
InternalOnly: false,
173-
Description: TestNotification.ResolvedDesc,
158+
Description: desc,
174159
}
175-
)
160+
}
176161

177162
func setScheme(scheme *runtime.Scheme) *runtime.Scheme {
178163
utilruntime.Must(clientgoscheme.AddToScheme(scheme))

pkg/handlers/helper.go

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -109,6 +109,9 @@ func isValidAlert(alert template.Alert, fleetMode bool) bool {
109109
log.WithField(LogFieldAlertname, alertname).Error("fleet mode alert has no valid source")
110110
return false
111111
}
112+
} else {
113+
log.WithField(LogFieldAlertname, alertname).Error("fleet mode alert has no source")
114+
return false
112115
}
113116

114117
// An alert in fleet mode must have a management cluster ID label

pkg/handlers/helper_test.go

Lines changed: 24 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -11,40 +11,57 @@ import (
1111
var _ = Describe("Webhook Handler Helpers", func() {
1212

1313
var (
14-
testAlert template.Alert
14+
testAlert template.Alert
15+
testFleetAlert template.Alert
1516
)
1617

1718
BeforeEach(func() {
18-
testAlert = testconst.TestAlert
19+
testAlert = testconst.NewTestAlert(false, false)
20+
testFleetAlert = testconst.NewTestAlert(false, true)
1921
})
2022

2123
Context("When checking if an alert is valid", func() {
2224
Context("When running in non-fleet mode", func() {
2325
It("should indicate a valid alert is valid", func() {
24-
r := isValidAlert(testconst.TestAlert, false)
26+
r := isValidAlert(testAlert, false)
2527
Expect(r).To(BeTrue())
2628
})
2729
It("should invalidate an alert with no name", func() {
2830
delete(testAlert.Labels, AMLabelAlertName)
29-
r := isValidAlert(testconst.TestAlert, false)
31+
r := isValidAlert(testAlert, false)
3032
Expect(r).To(BeFalse())
3133
})
3234
It("should invalidate an alert with no send_managed_notification label", func() {
3335
delete(testAlert.Labels, "send_managed_notification")
34-
r := isValidAlert(testconst.TestAlert, false)
36+
r := isValidAlert(testAlert, false)
3537
Expect(r).To(BeFalse())
3638
})
3739
It("should invalidate an alert with no managed_notification_template label", func() {
3840
delete(testAlert.Labels, "managed_notification_template")
39-
r := isValidAlert(testconst.TestAlert, false)
41+
r := isValidAlert(testAlert, false)
4042
Expect(r).To(BeFalse())
4143
})
4244
})
4345
Context("When running in fleet mode", func() {
4446
It("should indicate a valid alert is valid", func() {
45-
r := isValidAlert(testconst.TestFleetAlert, true)
47+
r := isValidAlert(testFleetAlert, true)
4648
Expect(r).To(BeTrue())
4749
})
50+
It("should invalidate a fleet alert with no source label", func() {
51+
delete(testFleetAlert.Labels, AMLabelAlertSourceName)
52+
r := isValidAlert(testFleetAlert, true)
53+
Expect(r).To(BeFalse())
54+
})
55+
It("should invalidate a fleet alert with no MC label", func() {
56+
delete(testFleetAlert.Labels, AMLabelAlertMCID)
57+
r := isValidAlert(testFleetAlert, true)
58+
Expect(r).To(BeFalse())
59+
})
60+
It("should invalidate a fleet alert with no HC label", func() {
61+
delete(testFleetAlert.Labels, AMLabelAlertHCID)
62+
r := isValidAlert(testFleetAlert, true)
63+
Expect(r).To(BeFalse())
64+
})
4865
})
4966
})
5067
})

pkg/handlers/webhookreceiver.go

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -144,11 +144,7 @@ func (h *WebhookReceiverHandler) processAlert(alert template.Alert, mnl *oav1alp
144144
}
145145
// Send the servicelog for the alert
146146
log.WithFields(log.Fields{LogFieldNotificationName: notification.Name}).Info("will send servicelog for notification")
147-
if firing {
148-
err = h.ocm.SendServiceLog(notification.Summary, notification.ActiveDesc, notification.ResolvedDesc, viper.GetString(config.ClusterID), firing)
149-
} else {
150-
151-
}
147+
err = h.ocm.SendServiceLog(notification.Summary, notification.ActiveDesc, notification.ResolvedDesc, viper.GetString(config.ClusterID), firing)
152148
if err != nil {
153149
log.WithError(err).WithFields(log.Fields{LogFieldNotificationName: notification.Name, LogFieldIsFiring: true}).Error("unable to send a notification")
154150
metrics.SetResponseMetricFailure("service_logs")

pkg/handlers/webhookreceiver_test.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,8 +60,8 @@ var _ = Describe("Webhook Handlers", func() {
6060
c: mockClient,
6161
ocm: mockOCMClient,
6262
}
63-
testAlert = testconst.TestAlert
64-
testAlertResolved = testconst.TestAlertResolved
63+
testAlert = testconst.NewTestAlert(false, false)
64+
testAlertResolved = testconst.NewTestAlert(true, false)
6565
})
6666
AfterEach(func() {
6767
server.Close()

pkg/handlers/webhookrhobsreceiver.go

Lines changed: 25 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ func (h *WebhookRHOBSReceiverHandler) processAlert(alert template.Alert, mfn oav
110110
for !slSent {
111111

112112
// Fetch the ManagedFleetNotificationRecord, or create it if it does not already exist
113-
var mfnr *oav1alpha1.ManagedFleetNotificationRecord
113+
mfnr := &oav1alpha1.ManagedFleetNotificationRecord{}
114114
err := h.c.Get(context.Background(), client.ObjectKey{
115115
Namespace: OCMAgentNamespaceName,
116116
Name: mcID,
@@ -128,16 +128,25 @@ func (h *WebhookRHOBSReceiverHandler) processAlert(alert template.Alert, mfn oav
128128
}
129129

130130
// Fetch notificationRecordByName and ADD if it doesn't exist
131-
_, err = mfnr.GetNotificationRecordByName(mcID, fn.Name)
131+
nfr, err := mfnr.GetNotificationRecordByName(mcID, fn.Name)
132132
if err != nil {
133133
// add NotificationRecordByName
134-
addNotificationRecordByName(fn.Name, fn.ResendWait, hcID, mfnr)
134+
nfr, err = addNotificationRecordByName(fn.Name, fn.ResendWait, hcID, mfnr)
135+
if err != nil {
136+
return err
137+
}
135138
}
136139

137-
nri, err := mfnr.GetNotificationRecordItem(mcID, fn.Name, hcID)
138-
if err != nil || nri == nil {
139-
return err
140+
// Check if we already have a notification record for this hosted cluster
141+
_, err = mfnr.GetNotificationRecordItem(mcID, fn.Name, hcID)
142+
if err != nil {
143+
// We mustn't, so create one
144+
_, err = mfnr.AddNotificationRecordItem(hcID, nfr)
145+
if err != nil {
146+
return err
147+
}
140148
}
149+
141150
// Has a servicelog already been sent
142151
canBeSent, err := mfnr.CanBeSent(mcID, fn.Name, hcID)
143152
if err != nil {
@@ -148,7 +157,6 @@ func (h *WebhookRHOBSReceiverHandler) processAlert(alert template.Alert, mfn oav
148157
log.WithFields(log.Fields{"notification": fn.Name,
149158
LogFieldResendInterval: fn.ResendWait,
150159
}).Info("not sending a notification as one was already sent recently")
151-
// TODO: don't return nil, update the MNFR status
152160
return nil
153161
}
154162

@@ -167,13 +175,17 @@ func (h *WebhookRHOBSReceiverHandler) processAlert(alert template.Alert, mfn oav
167175
// Count the service log sent by the template name
168176
metrics.CountServiceLogSent(fn.Name, "firing")
169177

170-
ri := mfnr.UpdateNotificationRecordItem(nri)
171-
if ri == nil {
172-
log.WithFields(log.Fields{LogFieldNotificationName: fn.Name, LogFieldManagedNotification: mfn.Name}).WithError(err).Error("unable to update notification status")
178+
ri, err := mfnr.UpdateNotificationRecordItem(fn.Name, hcID)
179+
if err != nil || ri == nil {
180+
log.WithFields(log.Fields{LogFieldNotificationName: fn.Name, LogFieldManagedNotification: mfn.Name}).WithError(err).Error("unable to update notification status in CR")
173181
return err
174182
}
175183

176184
err = h.c.Status().Update(context.TODO(), mfnr)
185+
if err != nil {
186+
log.WithFields(log.Fields{LogFieldNotificationName: fn.Name, LogFieldManagedNotification: mfn.Name}).WithError(err).Error("unable to update notification status on cluster")
187+
return err
188+
}
177189
}
178190
return nil
179191
}
@@ -198,13 +210,13 @@ func (h *WebhookRHOBSReceiverHandler) createManagedFleetNotificationRecord(mcID
198210
}
199211

200212
// add NotificationRecordByName for fleetnotification
201-
func addNotificationRecordByName(name string, rswait int32, hcID string, mfrn *oav1alpha1.ManagedFleetNotificationRecord) *oav1alpha1.NotificationRecordByName {
213+
func addNotificationRecordByName(name string, rswait int32, hcID string, mfrn *oav1alpha1.ManagedFleetNotificationRecord) (*oav1alpha1.NotificationRecordByName, error) {
202214
nfrbn := oav1alpha1.NotificationRecordByName{
203215
NotificationName: name,
204216
ResendWait: rswait,
205217
NotificationRecordItems: nil,
206218
}
207-
mfrn.AddNotificationRecordItem(hcID, &nfrbn)
208219
mfrn.Status.NotificationRecordByName = append(mfrn.Status.NotificationRecordByName, nfrbn)
209-
return &nfrbn
220+
_, err := mfrn.AddNotificationRecordItem(hcID, &nfrbn)
221+
return &nfrbn, err
210222
}

0 commit comments

Comments (0)