From 2b5de747a6039fe05957692ea8e13872982ac6b7 Mon Sep 17 00:00:00 2001 From: Dominique Clarke Date: Mon, 9 May 2022 11:55:15 -0400 Subject: [PATCH] [Synthetics] copy alert state to alert context and implement alert recovery (#128693) * copy alert state to alert context * adjust alert translations * uptime - implement alert recovery * adjust tests * [CI] Auto-commit changed files from 'node scripts/eslint --no-cache --fix' * remove unused constant * update snapshot * add default recovery messages * update snapshot * add doesSetRecoveryContext to uptime duration anomaly alert Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com> --- .../plugins/synthetics/common/translations.ts | 69 +++++++--- .../lib/alert_types/duration_anomaly.tsx | 3 +- .../lib/alert_types/monitor_status.test.ts | 3 +- .../lib/alert_types/monitor_status.tsx | 3 +- .../legacy_uptime/lib/alert_types/tls.tsx | 3 +- .../state/api/alert_actions.test.ts | 8 +- .../legacy_uptime/state/api/alert_actions.ts | 16 +-- .../synthetics/server/lib/alerts/common.ts | 11 +- .../lib/alerts/duration_anomaly.test.ts | 130 +++++++++++++----- .../server/lib/alerts/duration_anomaly.ts | 26 +++- .../server/lib/alerts/status_check.test.ts | 119 +++++++++++++++- .../server/lib/alerts/status_check.ts | 72 ++++++---- .../server/lib/alerts/test_utils/index.ts | 28 ++-- .../synthetics/server/lib/alerts/tls.test.ts | 81 +++++++---- .../synthetics/server/lib/alerts/tls.ts | 11 +- .../apps/uptime/simple_down_alert.ts | 2 +- 16 files changed, 444 insertions(+), 141 deletions(-) diff --git a/x-pack/plugins/synthetics/common/translations.ts b/x-pack/plugins/synthetics/common/translations.ts index 9bef65bd9dad6..52f0dbf5d906e 100644 --- a/x-pack/plugins/synthetics/common/translations.ts +++ b/x-pack/plugins/synthetics/common/translations.ts @@ -28,11 +28,23 @@ export const MonitorStatusTranslations = { defaultMessage: 'Monitor {monitorName} with url {monitorUrl} from {observerLocation} {statusMessage} The latest error message is {latestErrorMessage}', values: { - monitorName: '{{state.monitorName}}', - monitorUrl: '{{{state.monitorUrl}}}', - statusMessage: '{{{state.statusMessage}}}', - latestErrorMessage: '{{{state.latestErrorMessage}}}', - observerLocation: '{{state.observerLocation}}', + monitorName: '{{context.monitorName}}', + monitorUrl: '{{{context.monitorUrl}}}', + statusMessage: '{{{context.statusMessage}}}', + latestErrorMessage: '{{{context.latestErrorMessage}}}', + observerLocation: '{{context.observerLocation}}', + }, + } + ), + defaultRecoveryMessage: i18n.translate( + 'xpack.synthetics.alerts.monitorStatus.defaultRecoveryMessage', + { + defaultMessage: + 'Alert for monitor {monitorName} with url {monitorUrl} from {observerLocation} has recovered', + values: { + monitorName: '{{context.monitorName}}', + monitorUrl: '{{{context.monitorUrl}}}', + observerLocation: '{{context.observerLocation}}', }, } ), @@ -46,13 +58,19 @@ export const MonitorStatusTranslations = { export const TlsTranslations = { defaultActionMessage: i18n.translate('xpack.synthetics.alerts.tls.defaultActionMessage', { - defaultMessage: `Detected TLS certificate {commonName} from issuer {issuer} is {status}. Certificate {summary} -`, + defaultMessage: `Detected TLS certificate {commonName} from issuer {issuer} is {status}. 
Certificate {summary}`, values: { - commonName: '{{state.commonName}}', - issuer: '{{state.issuer}}', - summary: '{{state.summary}}', - status: '{{state.status}}', + commonName: '{{context.commonName}}', + issuer: '{{context.issuer}}', + summary: '{{context.summary}}', + status: '{{context.status}}', + }, + }), + defaultRecoveryMessage: i18n.translate('xpack.synthetics.alerts.tls.defaultRecoveryMessage', { + defaultMessage: `Alert for TLS certificate {commonName} from issuer {issuer} has recovered`, + values: { + commonName: '{{context.commonName}}', + issuer: '{{context.issuer}}', }, }), name: i18n.translate('xpack.synthetics.alerts.tls.clientName', { @@ -103,14 +121,27 @@ export const DurationAnomalyTranslations = { defaultMessage: `Abnormal ({severity} level) response time detected on {monitor} with url {monitorUrl} at {anomalyStartTimestamp}. Anomaly severity score is {severityScore}. Response times as high as {slowestAnomalyResponse} have been detected from location {observerLocation}. Expected response time is {expectedResponseTime}.`, values: { - severity: '{{state.severity}}', - anomalyStartTimestamp: '{{state.anomalyStartTimestamp}}', - monitor: '{{state.monitor}}', - monitorUrl: '{{{state.monitorUrl}}}', - slowestAnomalyResponse: '{{state.slowestAnomalyResponse}}', - expectedResponseTime: '{{state.expectedResponseTime}}', - severityScore: '{{state.severityScore}}', - observerLocation: '{{state.observerLocation}}', + severity: '{{context.severity}}', + anomalyStartTimestamp: '{{context.anomalyStartTimestamp}}', + monitor: '{{context.monitor}}', + monitorUrl: '{{{context.monitorUrl}}}', + slowestAnomalyResponse: '{{context.slowestAnomalyResponse}}', + expectedResponseTime: '{{context.expectedResponseTime}}', + severityScore: '{{context.severityScore}}', + observerLocation: '{{context.observerLocation}}', + }, + } + ), + defaultRecoveryMessage: i18n.translate( + 'xpack.synthetics.alerts.durationAnomaly.defaultRecoveryMessage', + { + defaultMessage: `Alert for abnormal ({severity} level) response time detected on monitor {monitor} with url {monitorUrl} from location {observerLocation} at {anomalyStartTimestamp} has recovered`, + values: { + severity: '{{context.severity}}', + anomalyStartTimestamp: '{{context.anomalyStartTimestamp}}', + monitor: '{{context.monitor}}', + monitorUrl: '{{{context.monitorUrl}}}', + observerLocation: '{{context.observerLocation}}', }, } ), diff --git a/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/duration_anomaly.tsx b/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/duration_anomaly.tsx index c866ca4c76956..5f0c8c07172bb 100644 --- a/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/duration_anomaly.tsx +++ b/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/duration_anomaly.tsx @@ -16,7 +16,7 @@ import { getMonitorRouteFromMonitorId } from '../../../../common/utils/get_monit import { CLIENT_ALERT_TYPES } from '../../../../common/constants/alerts'; import { DurationAnomalyTranslations } from '../../../../common/translations'; -const { defaultActionMessage, description } = DurationAnomalyTranslations; +const { defaultActionMessage, defaultRecoveryMessage, description } = DurationAnomalyTranslations; const DurationAnomalyAlert = React.lazy(() => import('./lazy_wrapper/duration_anomaly')); export const initDurationAnomalyAlertType: AlertTypeInitializer = ({ @@ -34,6 +34,7 @@ export const initDurationAnomalyAlertType: AlertTypeInitializer = ({ description, validate: () => ({ errors: {} }), 
defaultActionMessage, + defaultRecoveryMessage, requiresAppContext: true, format: ({ fields }) => ({ reason: fields[ALERT_REASON] || '', diff --git a/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/monitor_status.test.ts b/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/monitor_status.test.ts index 2f67219ac1ae5..c4d02806b5913 100644 --- a/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/monitor_status.test.ts +++ b/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/monitor_status.test.ts @@ -202,7 +202,8 @@ describe('monitor status alert type', () => { }) ).toMatchInlineSnapshot(` Object { - "defaultActionMessage": "Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}", + "defaultActionMessage": "Monitor {{context.monitorName}} with url {{{context.monitorUrl}}} from {{context.observerLocation}} {{{context.statusMessage}}} The latest error message is {{{context.latestErrorMessage}}}", + "defaultRecoveryMessage": "Alert for monitor {{context.monitorName}} with url {{{context.monitorUrl}}} from {{context.observerLocation}} has recovered", "description": "Alert when a monitor is down or an availability threshold is breached.", "documentationUrl": [Function], "format": [Function], diff --git a/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/monitor_status.tsx b/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/monitor_status.tsx index 0361e6408e43b..f7584cb04320e 100644 --- a/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/monitor_status.tsx +++ b/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/monitor_status.tsx @@ -23,7 +23,7 @@ import { getMonitorRouteFromMonitorId } from '../../../../common/utils/get_monit import { MonitorStatusTranslations } from '../../../../common/translations'; import { CLIENT_ALERT_TYPES } from '../../../../common/constants/alerts'; -const { defaultActionMessage, description } = MonitorStatusTranslations; +const { defaultActionMessage, defaultRecoveryMessage, description } = MonitorStatusTranslations; const MonitorStatusAlert = React.lazy(() => import('./lazy_wrapper/monitor_status')); @@ -54,6 +54,7 @@ export const initMonitorStatusAlertType: AlertTypeInitializer = ({ return validateFunc ? 
validateFunc(ruleParams) : ({} as ValidationResult); }, defaultActionMessage, + defaultRecoveryMessage, requiresAppContext: false, format: ({ fields }) => ({ reason: fields[ALERT_REASON] || '', diff --git a/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/tls.tsx b/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/tls.tsx index 2c1238028ccf5..b9ab025ecc021 100644 --- a/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/tls.tsx +++ b/x-pack/plugins/synthetics/public/legacy_uptime/lib/alert_types/tls.tsx @@ -14,7 +14,7 @@ import { AlertTypeInitializer } from '.'; import { CERTIFICATES_ROUTE } from '../../../../common/constants/ui'; -const { defaultActionMessage, description } = TlsTranslations; +const { defaultActionMessage, defaultRecoveryMessage, description } = TlsTranslations; const TLSAlert = React.lazy(() => import('./lazy_wrapper/tls_alert')); export const initTlsAlertType: AlertTypeInitializer = ({ core, @@ -29,6 +29,7 @@ export const initTlsAlertType: AlertTypeInitializer = ({ description, validate: () => ({ errors: {} }), defaultActionMessage, + defaultRecoveryMessage, requiresAppContext: false, format: ({ fields }) => ({ reason: fields[ALERT_REASON] || '', diff --git a/x-pack/plugins/synthetics/public/legacy_uptime/state/api/alert_actions.test.ts b/x-pack/plugins/synthetics/public/legacy_uptime/state/api/alert_actions.test.ts index 16c49d7c3afcb..068cdfd90b1ae 100644 --- a/x-pack/plugins/synthetics/public/legacy_uptime/state/api/alert_actions.test.ts +++ b/x-pack/plugins/synthetics/public/legacy_uptime/state/api/alert_actions.test.ts @@ -50,7 +50,7 @@ describe('Alert Actions factory', () => { eventAction: 'trigger', severity: 'error', summary: - 'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}', + 'Monitor {{context.monitorName}} with url {{{context.monitorUrl}}} from {{context.observerLocation}} {{{context.statusMessage}}} The latest error message is {{{context.latestErrorMessage}}}', }, id: 'f2a3b195-ed76-499a-805d-82d24d4eeba9', }, @@ -75,7 +75,7 @@ describe('Alert Actions factory', () => { eventAction: 'trigger', severity: 'error', summary: - 'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}', + 'Monitor {{context.monitorName}} with url {{{context.monitorUrl}}} from {{context.observerLocation}} {{{context.statusMessage}}} The latest error message is {{{context.latestErrorMessage}}}', }, }, ]); @@ -93,7 +93,7 @@ describe('Alert Actions factory', () => { eventAction: 'trigger', severity: 'error', summary: - 'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}', + 'Monitor {{context.monitorName}} with url {{{context.monitorUrl}}} from {{context.observerLocation}} {{{context.statusMessage}}} The latest error message is {{{context.latestErrorMessage}}}', }, id: 'f2a3b195-ed76-499a-805d-82d24d4eeba9', }, @@ -118,7 +118,7 @@ describe('Alert Actions factory', () => { eventAction: 'trigger', severity: 'error', summary: - 'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}', + 'Monitor {{context.monitorName}} with url {{{context.monitorUrl}}} 
from {{context.observerLocation}} {{{context.statusMessage}}} The latest error message is {{{context.latestErrorMessage}}}', }, }, ]); }); }); diff --git a/x-pack/plugins/synthetics/public/legacy_uptime/state/api/alert_actions.ts b/x-pack/plugins/synthetics/public/legacy_uptime/state/api/alert_actions.ts index eabfe42691e8d..31d8c0577780c 100644 --- a/x-pack/plugins/synthetics/public/legacy_uptime/state/api/alert_actions.ts +++ b/x-pack/plugins/synthetics/public/legacy_uptime/state/api/alert_actions.ts @@ -127,11 +127,11 @@ function getIndexActionParams(selectedMonitor: Ping, recovery = false): IndexAct return { documents: [ { - monitorName: '{{state.monitorName}}', - monitorUrl: '{{{state.monitorUrl}}}', + monitorName: '{{context.monitorName}}', + monitorUrl: '{{{context.monitorUrl}}}', statusMessage: getRecoveryMessage(selectedMonitor), latestErrorMessage: '', - observerLocation: '{{state.observerLocation}}', + observerLocation: '{{context.observerLocation}}', }, ], indexOverride: null, @@ -140,11 +140,11 @@ return { documents: [ { - monitorName: '{{state.monitorName}}', - monitorUrl: '{{{state.monitorUrl}}}', - statusMessage: '{{{state.statusMessage}}}', - latestErrorMessage: '{{{state.latestErrorMessage}}}', - observerLocation: '{{state.observerLocation}}', + monitorName: '{{context.monitorName}}', + monitorUrl: '{{{context.monitorUrl}}}', + statusMessage: '{{{context.statusMessage}}}', + latestErrorMessage: '{{{context.latestErrorMessage}}}', + observerLocation: '{{context.observerLocation}}', }, ], indexOverride: null, diff --git a/x-pack/plugins/synthetics/server/lib/alerts/common.ts b/x-pack/plugins/synthetics/server/lib/alerts/common.ts index 8381adce21d2c..f370b258b482f 100644 --- a/x-pack/plugins/synthetics/server/lib/alerts/common.ts +++ b/x-pack/plugins/synthetics/server/lib/alerts/common.ts @@ -8,6 +8,7 @@ import { isRight } from 'fp-ts/lib/Either'; import Mustache from 'mustache'; import { IBasePath } from '@kbn/core/server'; +import { RuleExecutorServices } from '@kbn/alerting-plugin/server'; import { UptimeCommonState, UptimeCommonStateType } from '../../../common/runtime_types'; export type UpdateUptimeAlertState = ( @@ -59,9 +60,17 @@ export const updateState: UpdateUptimeAlertState = (state, isTriggeredNow) => { }; export const generateAlertMessage = (messageTemplate: string, fields: Record<string, any>) => { - return Mustache.render(messageTemplate, { state: { ...fields } }); + return Mustache.render(messageTemplate, { context: { ...fields }, state: { ...fields } }); }; export const getViewInAppUrl = (relativeViewInAppUrl: string, basePath: IBasePath) => basePath.publicBaseUrl ?
new URL(basePath.prepend(relativeViewInAppUrl), basePath.publicBaseUrl).toString() : relativeViewInAppUrl; + +export const setRecoveredAlertsContext = (alertFactory: RuleExecutorServices['alertFactory']) => { + const { getRecoveredAlerts } = alertFactory.done(); + for (const alert of getRecoveredAlerts()) { + const state = alert.getState(); + alert.setContext(state); + } +}; diff --git a/x-pack/plugins/synthetics/server/lib/alerts/duration_anomaly.test.ts b/x-pack/plugins/synthetics/server/lib/alerts/duration_anomaly.test.ts index eb4509850414b..ad821a509b77b 100644 --- a/x-pack/plugins/synthetics/server/lib/alerts/duration_anomaly.test.ts +++ b/x-pack/plugins/synthetics/server/lib/alerts/duration_anomaly.test.ts @@ -12,7 +12,6 @@ import { durationAnomalyAlertFactory } from './duration_anomaly'; import { DURATION_ANOMALY } from '../../../common/constants/alerts'; import { AnomaliesTableRecord, AnomalyRecordDoc } from '@kbn/ml-plugin/common/types/anomalies'; -import { DynamicSettings } from '../../../common/runtime_types'; import { createRuleTypeMocks, bootstrapDependencies } from './test_utils'; import { getSeverityType } from '@kbn/ml-plugin/common/util/anomaly_utils'; import { Ping } from '../../../common/runtime_types/ping'; @@ -33,34 +32,6 @@ interface MockAnomalyResult { const monitorId = 'uptime-monitor'; const mockUrl = 'https://elastic.co'; -/** - * This function aims to provide an easy way to give mock props that will - * reduce boilerplate for tests. - * @param dynamic the expiration and aging thresholds received at alert creation time - * @param params the params received at alert creation time - * @param state the state the alert maintains - */ -const mockOptions = ( - dynamicCertSettings?: { - certExpirationThreshold: DynamicSettings['certExpirationThreshold']; - certAgeThreshold: DynamicSettings['certAgeThreshold']; - }, - state = {}, - params = { - timerange: { from: 'now-15m', to: 'now' }, - monitorId, - severity: 'warning', - } -): any => { - const { services } = createRuleTypeMocks(dynamicCertSettings); - - return { - params, - state, - services, - }; -}; - const mockAnomaliesResult: MockAnomalyResult = { anomalies: [ { @@ -94,6 +65,50 @@ const mockPing: Partial<Ping> = { }, }; +const mockRecoveredAlerts = mockAnomaliesResult.anomalies.map((result) => ({ + firstCheckedAt: 'date', + firstTriggeredAt: undefined, + lastCheckedAt: 'date', + lastResolvedAt: undefined, + isTriggered: false, + anomalyStartTimestamp: 'date', + currentTriggerStarted: undefined, + expectedResponseTime: `${Math.round(result.typicalSort / 1000)} ms`, + lastTriggeredAt: undefined, + monitor: monitorId, + monitorUrl: mockPing.url?.full, + observerLocation: result.entityValue, + severity: getSeverityType(result.severity), + severityScore: result.severity, + slowestAnomalyResponse: `${Math.round(result.actualSort / 1000)} ms`, + bucketSpan: result.source.bucket_span, +})); + +/** + * This function aims to provide an easy way to give mock props that will + * reduce boilerplate for tests.
+ * @param state the state the alert maintains + * @param params the params received at alert creation time + */ +const mockOptions = ( + state = {}, + params = { + timerange: { from: 'now-15m', to: 'now' }, + monitorId, + severity: 'warning', + } +): any => { + const { services, setContext } = createRuleTypeMocks(mockRecoveredAlerts); + + return { + params, + state, + services, + setContext, + }; +}; + describe('duration anomaly alert', () => { let toISOStringSpy: jest.SpyInstance; const mockDate = 'date'; @@ -206,7 +221,7 @@ Response times as high as ${slowestResponse} ms have been detected from location )} level) response time detected on uptime-monitor with url ${ mockPing.url?.full } at date. Anomaly severity score is ${anomaly.severity}. - Response times as high as ${slowestResponse} ms have been detected from location ${ +Response times as high as ${slowestResponse} ms have been detected from location ${ anomaly.entityValue }. Expected response time is ${typicalResponse} ms.`; @@ -218,7 +233,17 @@ Response times as high as ${slowestResponse} ms have been detected from location Array [ "xpack.uptime.alerts.actionGroups.durationAnomaly", Object { - "${ALERT_REASON_MSG}": "${reasonMessages[0]}", + "anomalyStartTimestamp": "date", + "bucketSpan": 900, + "expectedResponseTime": "10 ms", + "monitor": "uptime-monitor", + "monitorUrl": "https://elastic.co", + "observerLocation": "harrisburg", + "${ALERT_REASON_MSG}": "Abnormal (minor level) response time detected on uptime-monitor with url https://elastic.co at date. Anomaly severity score is 25. + Response times as high as 200 ms have been detected from location harrisburg. Expected response time is 10 ms.", + "severity": "minor", + "severityScore": 25, + "slowestAnomalyResponse": "200 ms", "${VIEW_IN_APP_URL}": "http://localhost:5601/hfe/app/uptime/monitor/eHBhY2sudXB0aW1lLmFsZXJ0cy5hY3Rpb25Hcm91cHMuZHVyYXRpb25Bbm9tYWx5MA==?dateRangeEnd=now&dateRangeStart=2022-03-17T13%3A13%3A33.755Z", }, ] @@ -227,11 +252,52 @@ Response times as high as ${slowestResponse} ms have been detected from location Array [ "xpack.uptime.alerts.actionGroups.durationAnomaly", Object { + "anomalyStartTimestamp": "date", + "bucketSpan": 900, + "expectedResponseTime": "20 ms", + "monitor": "uptime-monitor", + "monitorUrl": "https://elastic.co", + "observerLocation": "fairbanks", + "${ALERT_REASON_MSG}": "Abnormal (warning level) response time detected on uptime-monitor with url https://elastic.co at date. Anomaly severity score is 10. + Response times as high as 300 ms have been detected from location fairbanks.
Expected response time is 20 ms.", + "severity": "warning", + "severityScore": 10, + "slowestAnomalyResponse": "300 ms", "${VIEW_IN_APP_URL}": "http://localhost:5601/hfe/app/uptime/monitor/eHBhY2sudXB0aW1lLmFsZXJ0cy5hY3Rpb25Hcm91cHMuZHVyYXRpb25Bbm9tYWx5MQ==?dateRangeEnd=now&dateRangeStart=2022-03-17T13%3A13%3A33.755Z", }, ] `); }); + + it('sets alert recovery context for recovered alerts', async () => { + toISOStringSpy.mockImplementation(() => mockDate); + const mockResultServiceProviderGetter: jest.Mock<{ + getAnomaliesTableData: jest.Mock; + }> = jest.fn(); + const mockGetAnomliesTableDataGetter: jest.Mock = jest.fn(); + const mockGetLatestMonitorGetter: jest.Mock<Partial<Ping>> = jest.fn(); + + mockGetLatestMonitorGetter.mockReturnValue(mockPing); + mockGetAnomliesTableDataGetter.mockReturnValue(mockAnomaliesResult); + mockResultServiceProviderGetter.mockReturnValue({ + getAnomaliesTableData: mockGetAnomliesTableDataGetter, + }); + const { server, libs, plugins } = bootstrapDependencies( + { getLatestMonitor: mockGetLatestMonitorGetter }, + { + ml: { + resultsServiceProvider: mockResultServiceProviderGetter, + }, + } + ); + const alert = durationAnomalyAlertFactory(server, libs, plugins); + const options = mockOptions(); + // @ts-ignore the executor can return `void`, but ours never does + const state: Record<string, any> = await alert.executor(options); + expect(options.setContext).toHaveBeenCalledTimes(2); + mockRecoveredAlerts.forEach((alertState) => { + expect(options.setContext).toHaveBeenCalledWith(alertState); + }); + }); }); }); diff --git a/x-pack/plugins/synthetics/server/lib/alerts/duration_anomaly.ts b/x-pack/plugins/synthetics/server/lib/alerts/duration_anomaly.ts index f2ec05b11f5ea..a93d44013708b 100644 --- a/x-pack/plugins/synthetics/server/lib/alerts/duration_anomaly.ts +++ b/x-pack/plugins/synthetics/server/lib/alerts/duration_anomaly.ts @@ -15,7 +15,12 @@ import { ActionGroupIdsOf } from '@kbn/alerting-plugin/common'; import { AnomaliesTableRecord } from '@kbn/ml-plugin/common/types/anomalies'; import { getSeverityType } from '@kbn/ml-plugin/common/util/anomaly_utils'; -import { updateState, generateAlertMessage, getViewInAppUrl } from './common'; +import { + updateState, + generateAlertMessage, + getViewInAppUrl, + setRecoveredAlertsContext, +} from './common'; import { CLIENT_ALERT_TYPES, DURATION_ANOMALY } from '../../../common/constants/alerts'; import { commonStateTranslations, durationAnomalyTranslations } from './translations'; import { UptimeCorePluginsSetup } from '../adapters/framework'; @@ -94,14 +99,26 @@ export const durationAnomalyAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> }, ], actionVariables: { - context: [ACTION_VARIABLES[ALERT_REASON_MSG], ACTION_VARIABLES[VIEW_IN_APP_URL]], + context: [ + ACTION_VARIABLES[ALERT_REASON_MSG], + ACTION_VARIABLES[VIEW_IN_APP_URL], + ...durationAnomalyTranslations.actionVariables, + ...commonStateTranslations, + ], state: [...durationAnomalyTranslations.actionVariables, ...commonStateTranslations], }, isExportable: true, minimumLicenseRequired: 'platinum', + doesSetRecoveryContext: true, async executor({ params, - services: { alertWithLifecycle, scopedClusterClient, savedObjectsClient, getAlertStartedDate }, + services: { + alertWithLifecycle, + scopedClusterClient, + savedObjectsClient, + getAlertStartedDate, + alertFactory, + }, state, startedAt, }) { @@ -160,10 +177,13 @@ export const durationAnomalyAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> alertInstance.scheduleActions(DURATION_ANOMALY.id, { [ALERT_REASON_MSG]: alertReasonMessage,
[VIEW_IN_APP_URL]: getViewInAppUrl(relativeViewInAppUrl, basePath), + ...summary, }); }); } + setRecoveredAlertsContext(alertFactory); + return updateState(state, foundAnomalies); }, }); diff --git a/x-pack/plugins/synthetics/server/lib/alerts/status_check.test.ts b/x-pack/plugins/synthetics/server/lib/alerts/status_check.test.ts index 84e7c0d68400c..b9a90ee18038a 100644 --- a/x-pack/plugins/synthetics/server/lib/alerts/status_check.test.ts +++ b/x-pack/plugins/synthetics/server/lib/alerts/status_check.test.ts @@ -56,6 +56,53 @@ const mockMonitors = [ }, ]; +const mockRecoveredAlerts = [ + { + currentTriggerStarted: '2022-04-25T14:36:31.511Z', + firstCheckedAt: '2022-04-25T14:10:30.785Z', + firstTriggeredAt: '2022-04-25T14:10:30.785Z', + lastCheckedAt: '2022-04-25T14:36:31.511Z', + lastTriggeredAt: '2022-04-25T14:36:31.511Z', + lastResolvedAt: '2022-04-25T14:23:43.007Z', + isTriggered: true, + monitorUrl: 'https://expired.badssl.com/', + monitorId: 'expired-badssl', + monitorName: 'BadSSL Expired', + monitorType: 'http', + latestErrorMessage: + 'Get "https://expired.badssl.com/": x509: certificate has expired or is not yet valid: current time 2022-04-25T10:36:27-04:00 is after 2015-04-12T23:59:59Z', + observerLocation: 'Unnamed-location', + observerHostname: 'Dominiques-MacBook-Pro-2.local', + reason: + 'BadSSL Expired from Unnamed-location failed 2 times in the last 3 mins. Alert when > 1.', + statusMessage: 'failed 2 times in the last 3 mins. Alert when > 1.', + start: '2022-04-25T14:36:31.621Z', + duration: 315110000000, + }, + { + currentTriggerStarted: '2022-04-25T14:36:31.511Z', + firstCheckedAt: '2022-04-25T14:10:30.785Z', + firstTriggeredAt: '2022-04-25T14:10:30.785Z', + lastCheckedAt: '2022-04-25T14:36:31.511Z', + lastTriggeredAt: '2022-04-25T14:36:31.511Z', + lastResolvedAt: '2022-04-25T14:23:43.007Z', + isTriggered: true, + monitorUrl: 'https://invalid.badssl.com/', + monitorId: 'expired-badssl', + monitorName: 'BadSSL Expired', + monitorType: 'http', + latestErrorMessage: + 'Get "https://invalid.badssl.com/": x509: certificate has expired or is not yet valid: current time 2022-04-25T10:36:27-04:00 is after 2015-04-12T23:59:59Z', + observerLocation: 'Unnamed-location', + observerHostname: 'Dominiques-MacBook-Pro-2.local', + reason: + 'BadSSL Expired from Unnamed-location failed 2 times in the last 3 mins. Alert when > 1.', + statusMessage: 'failed 2 times in the last 3 mins. 
Alert when > 1.', + start: '2022-04-25T14:36:31.621Z', + duration: 315110000000, + }, +]; + const mockCommonAlertDocumentFields = (monitorInfo: GetMonitorStatusResult['monitorInfo']) => ({ 'agent.name': monitorInfo.agent?.name, 'error.message': monitorInfo.error?.message, @@ -121,13 +168,14 @@ const mockOptions = ( }, } ): any => { - const { services } = createRuleTypeMocks(); + const { services, setContext } = createRuleTypeMocks(mockRecoveredAlerts); return { params, state, services, rule, + setContext, }; }; @@ -142,6 +190,7 @@ describe('status check alert', () => { afterEach(() => { jest.clearAllMocks(); }); + describe('executor', () => { it('does not trigger when there are no monitors down', async () => { expect.assertions(5); @@ -242,7 +291,15 @@ describe('status check alert', () => { Array [ "xpack.uptime.alerts.actionGroups.monitorStatus", Object { + "latestErrorMessage": "error message 1", + "monitorId": "first", + "monitorName": "First", + "monitorType": "myType", + "monitorUrl": "localhost:8080", + "observerHostname": undefined, + "observerLocation": "harrisburg", "reason": "First from harrisburg failed 234 times in the last 15 mins. Alert when > 5.", + "statusMessage": "failed 234 times in the last 15 mins. Alert when > 5.", "viewInAppUrl": "http://localhost:5601/hfe/app/uptime/monitor/Zmlyc3Q=?dateRangeEnd=now&dateRangeStart=2022-03-17T13%3A13%3A33.755Z&filters=%5B%5B%22observer.geo.name%22%2C%5B%22harrisburg%22%5D%5D%5D", }, ] @@ -313,7 +370,15 @@ describe('status check alert', () => { Array [ "xpack.uptime.alerts.actionGroups.monitorStatus", Object { + "latestErrorMessage": "error message 1", + "monitorId": "first", + "monitorName": "First", + "monitorType": "myType", + "monitorUrl": "localhost:8080", + "observerHostname": undefined, + "observerLocation": "harrisburg", "reason": "First from harrisburg failed 234 times in the last 15m. Alert when > 5.", + "statusMessage": "failed 234 times in the last 15m. Alert when > 5.", "viewInAppUrl": "http://localhost:5601/hfe/app/uptime/monitor/Zmlyc3Q=?dateRangeEnd=now&dateRangeStart=2022-03-17T13%3A13%3A33.755Z&filters=%5B%5B%22observer.geo.name%22%2C%5B%22harrisburg%22%5D%5D%5D", }, ] @@ -785,28 +850,60 @@ describe('status check alert', () => { Array [ "xpack.uptime.alerts.actionGroups.monitorStatus", Object { + "latestErrorMessage": undefined, + "monitorId": "foo", + "monitorName": "Foo", + "monitorType": "myType", + "monitorUrl": "https://foo.com", + "observerHostname": undefined, + "observerLocation": "harrisburg", "reason": "Foo from harrisburg 35 days availability is 99.28%. Alert when < 99.34%.", + "statusMessage": "35 days availability is 99.28%. Alert when < 99.34%.", "viewInAppUrl": "http://localhost:5601/hfe/app/uptime/monitor/Zm9v?dateRangeEnd=now&dateRangeStart=2022-03-17T13%3A13%3A33.755Z&filters=%5B%5B%22observer.geo.name%22%2C%5B%22harrisburg%22%5D%5D%5D", }, ], Array [ "xpack.uptime.alerts.actionGroups.monitorStatus", Object { + "latestErrorMessage": undefined, + "monitorId": "foo", + "monitorName": "Foo", + "monitorType": "myType", + "monitorUrl": "https://foo.com", + "observerHostname": undefined, + "observerLocation": "fairbanks", "reason": "Foo from fairbanks 35 days availability is 98.03%. Alert when < 99.34%.", + "statusMessage": "35 days availability is 98.03%. 
Alert when < 99.34%.", "viewInAppUrl": "http://localhost:5601/hfe/app/uptime/monitor/Zm9v?dateRangeEnd=now&dateRangeStart=2022-03-17T13%3A13%3A33.755Z&filters=%5B%5B%22observer.geo.name%22%2C%5B%22fairbanks%22%5D%5D%5D", }, ], Array [ "xpack.uptime.alerts.actionGroups.monitorStatus", Object { + "latestErrorMessage": undefined, + "monitorId": "unreliable", + "monitorName": "Unreliable", + "monitorType": "myType", + "monitorUrl": "https://unreliable.co", + "observerHostname": undefined, + "observerLocation": "fairbanks", "reason": "Unreliable from fairbanks 35 days availability is 90.92%. Alert when < 99.34%.", + "statusMessage": "35 days availability is 90.92%. Alert when < 99.34%.", "viewInAppUrl": "http://localhost:5601/hfe/app/uptime/monitor/dW5yZWxpYWJsZQ==?dateRangeEnd=now&dateRangeStart=2022-03-17T13%3A13%3A33.755Z&filters=%5B%5B%22observer.geo.name%22%2C%5B%22fairbanks%22%5D%5D%5D", }, ], Array [ "xpack.uptime.alerts.actionGroups.monitorStatus", Object { + "latestErrorMessage": undefined, + "monitorId": "no-name", + "monitorName": "no-name", + "monitorType": "myType", + "monitorUrl": "https://no-name.co", + "observerHostname": undefined, + "observerLocation": "fairbanks", "reason": "no-name from fairbanks 35 days availability is 90.92%. Alert when < 99.34%.", + "statusMessage": "35 days availability is 90.92%. Alert when < 99.34%.", "viewInAppUrl": "http://localhost:5601/hfe/app/uptime/monitor/bm8tbmFtZQ==?dateRangeEnd=now&dateRangeStart=2022-03-17T13%3A13%3A33.755Z&filters=%5B%5B%22observer.geo.name%22%2C%5B%22fairbanks%22%5D%5D%5D", }, ], @@ -909,6 +1006,26 @@ describe('status check alert', () => { }) ); }); + + it('sets alert recovery context for recovered alerts', async () => { + toISOStringSpy.mockImplementation(() => 'foo date string'); + const mockGetter: jest.Mock = jest.fn(); + + mockGetter.mockReturnValue(mockMonitors); + const { server, libs, plugins } = bootstrapDependencies({ getMonitorStatus: mockGetter }); + const alert = statusCheckAlertFactory(server, libs, plugins); + const options = mockOptions(); + // @ts-ignore the executor can return `void`, but ours never does + const state: Record<string, any> = await alert.executor(options); + expect(options.setContext).toHaveBeenCalledTimes(2); + mockRecoveredAlerts.forEach((alertState) => { + expect(options.setContext).toHaveBeenCalledWith(alertState); + }); + }); }); describe('alert factory', () => { diff --git a/x-pack/plugins/synthetics/server/lib/alerts/status_check.ts b/x-pack/plugins/synthetics/server/lib/alerts/status_check.ts index d305dedea3e10..243749f686106 100644 --- a/x-pack/plugins/synthetics/server/lib/alerts/status_check.ts +++ b/x-pack/plugins/synthetics/server/lib/alerts/status_check.ts @@ -21,7 +21,7 @@ import { GetMonitorAvailabilityParams, } from '../../../common/runtime_types'; import { CLIENT_ALERT_TYPES, MONITOR_STATUS } from '../../../common/constants/alerts'; -import { updateState, getViewInAppUrl } from './common'; +import { updateState, getViewInAppUrl, setRecoveredAlertsContext } from './common'; import { commonMonitorStateI18, commonStateTranslations, @@ -47,6 +47,7 @@ import { import { getMonitorRouteFromMonitorId } from '../../../common/utils/get_monitor_url'; export type ActionGroupIds = ActionGroupIdsOf<typeof MONITOR_STATUS>; + /** * Returns the appropriate range for filtering the documents by `@timestamp`.
* @@ -75,22 +76,6 @@ export function getTimestampRange({ }; } -const getMonIdByLoc = (monitorId: string, location: string) => { - return monitorId + '-' + location; -}; - -const uniqueDownMonitorIds = (items: GetMonitorStatusResult[]): Set<string> => - items.reduce( - (acc, { monitorId, location }) => acc.add(getMonIdByLoc(monitorId, location)), - new Set<string>() - ); - -const uniqueAvailMonitorIds = (items: GetMonitorAvailabilityResult[]): Set<string> => - items.reduce( - (acc, { monitorId, location }) => acc.add(getMonIdByLoc(monitorId, location)), - new Set<string>() - ); - export const getUniqueIdsByLoc = ( downMonitorsByLocation: GetMonitorStatusResult[], availabilityResults: GetMonitorAvailabilityResult[] @@ -161,7 +146,7 @@ export const getMonitorSummary = (monitorInfo: Ping, statusMessage: string) => { return { ...summary, - reason: `${monitorName} from ${observerLocation} ${statusMessage}`, + [ALERT_REASON_MSG]: `${monitorName} from ${observerLocation} ${statusMessage}`, }; }; @@ -222,6 +207,22 @@ export const getInstanceId = (monitorInfo: Ping, monIdByLoc: string) => { return `${urlText}_${monIdByLoc}`; }; +const getMonIdByLoc = (monitorId: string, location: string) => { + return monitorId + '-' + location; +}; + +const uniqueDownMonitorIds = (items: GetMonitorStatusResult[]): Set<string> => + items.reduce( + (acc, { monitorId, location }) => acc.add(getMonIdByLoc(monitorId, location)), + new Set<string>() + ); + +const uniqueAvailMonitorIds = (items: GetMonitorAvailabilityResult[]): Set<string> => + items.reduce( + (acc, { monitorId, location }) => acc.add(getMonIdByLoc(monitorId, location)), + new Set<string>() + ); + export const statusCheckAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> = (server, libs) => ({ id: CLIENT_ALERT_TYPES.MONITOR_STATUS, producer: 'uptime', @@ -281,15 +282,23 @@ export const statusCheckAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> = ( ACTION_VARIABLES[MONITOR_WITH_GEO], ACTION_VARIABLES[ALERT_REASON_MSG], ACTION_VARIABLES[VIEW_IN_APP_URL], + ...commonMonitorStateI18, ], state: [...commonMonitorStateI18, ...commonStateTranslations], }, isExportable: true, minimumLicenseRequired: 'basic', + doesSetRecoveryContext: true, async executor({ params: rawParams, state, - services: { savedObjectsClient, scopedClusterClient, alertWithLifecycle, getAlertStartedDate }, + services: { + savedObjectsClient, + scopedClusterClient, + alertWithLifecycle, + getAlertStartedDate, + alertFactory, + }, rule: { schedule: { interval }, }, @@ -314,14 +323,12 @@ export const statusCheckAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> = ( }); const filterString = await formatFilterString(uptimeEsClient, filters, search, libs); - const timespanInterval = `${String(timerangeCount)}${timerangeUnit}`; // Range filter for `monitor.timespan`, the range of time the ping is valid const timespanRange = oldVersionTimeRange || { from: `now-${timespanInterval}`, to: 'now', }; - // Range filter for `@timestamp`, the time the document was indexed const timestampRange = getTimestampRange({ ruleScheduleLookback: `now-${interval}`, @@ -364,10 +371,14 @@ export const statusCheckAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> = ( fields: getMonitorAlertDocument(monitorSummary), }); - alert.replaceState({ - ...state, + const context = { ...monitorSummary, statusMessage, + }; + + alert.replaceState({ + ...state, + ...context, ...updateState(state, true), }); @@ -381,10 +392,11 @@ export const statusCheckAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> = ( }); alert.scheduleActions(MONITOR_STATUS.id, { - [ALERT_REASON_MSG]: monitorSummary.reason, [VIEW_IN_APP_URL]: getViewInAppUrl(relativeViewInAppUrl, basePath),
+ ...context, }); } + setRecoveredAlertsContext(alertFactory); return updateState(state, downMonitorsByLocation.length > 0); } @@ -436,11 +448,16 @@ export const statusCheckAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> = ( fields: getMonitorAlertDocument(monitorSummary), }); - alert.replaceState({ - ...updateState(state, true), + const context = { ...monitorSummary, statusMessage, + }; + + alert.replaceState({ + ...updateState(state, true), + ...context, }); + const relativeViewInAppUrl = getMonitorRouteFromMonitorId({ monitorId: monitorSummary.monitorId, dateRangeEnd: 'now', @@ -451,10 +468,11 @@ export const statusCheckAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> = ( }); alert.scheduleActions(MONITOR_STATUS.id, { - [ALERT_REASON_MSG]: monitorSummary.reason, [VIEW_IN_APP_URL]: getViewInAppUrl(relativeViewInAppUrl, basePath), + ...context, }); }); + setRecoveredAlertsContext(alertFactory); return updateState(state, downMonitorsByLocation.length > 0); }, }); diff --git a/x-pack/plugins/synthetics/server/lib/alerts/test_utils/index.ts b/x-pack/plugins/synthetics/server/lib/alerts/test_utils/index.ts index af248af730eee..456b0675eee87 100644 --- a/x-pack/plugins/synthetics/server/lib/alerts/test_utils/index.ts +++ b/x-pack/plugins/synthetics/server/lib/alerts/test_utils/index.ts @@ -13,8 +13,6 @@ import { UMServerLibs } from '../../lib'; import { UptimeCorePluginsSetup, UptimeServerSetup } from '../../adapters'; import type { UptimeRouter } from '../../../types'; import { getUptimeESMockClient } from '../../requests/helper'; -import { DynamicSettings } from '../../../../common/runtime_types'; -import { DYNAMIC_SETTINGS_DEFAULTS } from '../../../../common/constants'; /** * The alert takes some dependencies as parameters; these are things like @@ -41,15 +39,7 @@ export const bootstrapDependencies = (customRequests?: any, customPlugins: any = return { server, libs, plugins }; }; -export const createRuleTypeMocks = ( - dynamicCertSettings: { - certAgeThreshold: DynamicSettings['certAgeThreshold']; - certExpirationThreshold: DynamicSettings['certExpirationThreshold']; - } = { - certAgeThreshold: DYNAMIC_SETTINGS_DEFAULTS.certAgeThreshold, - certExpirationThreshold: DYNAMIC_SETTINGS_DEFAULTS.certExpirationThreshold, - } -) => { +export const createRuleTypeMocks = (recoveredAlerts: Array<Record<string, any>> = []) => { const loggerMock = { debug: jest.fn(), warn: jest.fn(), @@ -58,10 +48,17 @@ export const createRuleTypeMocks = ( const scheduleActions = jest.fn(); const replaceState = jest.fn(); + const setContext = jest.fn(); const services = { ...getUptimeESMockClient(), ...alertsMock.createRuleExecutorServices(), + alertFactory: { + ...alertsMock.createRuleExecutorServices().alertFactory, + done: () => ({ + getRecoveredAlerts: () => createRecoveredAlerts(recoveredAlerts, setContext), + }), + }, alertWithLifecycle: jest.fn().mockReturnValue({ scheduleActions, replaceState }), getAlertStartedDate: jest.fn().mockReturnValue('2022-03-17T13:13:33.755Z'), logger: loggerMock, @@ -77,5 +74,14 @@ export const createRuleTypeMocks = ( services, scheduleActions, replaceState, + setContext, }; }; + +const createRecoveredAlerts = (alerts: Array<Record<string, any>>, setContext: jest.Mock) => { + return alerts.map((alert) => ({ + getState: () => alert, + setContext, + context: {}, + })); +}; diff --git a/x-pack/plugins/synthetics/server/lib/alerts/tls.test.ts b/x-pack/plugins/synthetics/server/lib/alerts/tls.test.ts index 31a5e98bf9f02..88f8b964eb590 100644 --- a/x-pack/plugins/synthetics/server/lib/alerts/tls.test.ts +++
b/x-pack/plugins/synthetics/server/lib/alerts/tls.test.ts @@ -7,7 +7,7 @@ import moment from 'moment'; import { tlsAlertFactory, getCertSummary } from './tls'; import { TLS } from '../../../common/constants/alerts'; -import { CertResult, DynamicSettings } from '../../../common/runtime_types'; +import { CertResult } from '../../../common/runtime_types'; import { createRuleTypeMocks, bootstrapDependencies } from './test_utils'; import { DYNAMIC_SETTINGS_DEFAULTS } from '../../../common/constants'; @@ -19,24 +19,6 @@ import { savedObjectsAdapter, UMSavedObjectsAdapter } from '../saved_objects/sav * This function aims to provide an easy way to give mock props that will * reduce boilerplate for tests. * @param params the params received at alert creation time * @param state the state the alert maintains */ -const mockOptions = ( - dynamicCertSettings?: { - certExpirationThreshold: DynamicSettings['certExpirationThreshold']; - certAgeThreshold: DynamicSettings['certAgeThreshold']; - }, - state = {} -): any => { - const { services } = createRuleTypeMocks(dynamicCertSettings); - const params = { - timerange: { from: 'now-15m', to: 'now' }, - }; - - return { - params, - state, - services, - }; -}; const mockCertResult: CertResult = { certs: [ { ... } ], total: 4, }; +const mockRecoveredAlerts = [ + { + commonName: mockCertResult.certs[0].common_name ?? '', + issuer: mockCertResult.certs[0].issuer ?? '', + summary: 'sample summary', + status: 'expired', + }, + { + commonName: mockCertResult.certs[1].common_name ?? '', + issuer: mockCertResult.certs[1].issuer ?? '', + summary: 'sample summary 2', + status: 'aging', + }, +]; + +const mockOptions = (state = {}): any => { + const { services, setContext } = createRuleTypeMocks(mockRecoveredAlerts); + const params = { + timerange: { from: 'now-15m', to: 'now' }, + }; + + return { + params, + state, + services, + setContext, + }; +}; + describe('tls alert', () => { let toISOStringSpy: jest.SpyInstance; let savedObjectsAdapterSpy: jest.SpyInstance< UMSavedObjectsAdapter @@ -131,16 +142,18 @@ describe('tls alert', () => { const [{ value: alertInstanceMock }] = alertWithLifecycle.mock.results; expect(alertInstanceMock.replaceState).toHaveBeenCalledTimes(4); mockCertResult.certs.forEach((cert) => { - expect(alertInstanceMock.replaceState).toBeCalledWith( - expect.objectContaining({ - commonName: cert.common_name, - issuer: cert.issuer, - status: 'expired', - }) + const context = { + commonName: cert.common_name, + issuer: cert.issuer, + status: 'expired', + }; + expect(alertInstanceMock.replaceState).toBeCalledWith(expect.objectContaining(context)); + expect(alertInstanceMock.scheduleActions).toBeCalledWith( + TLS.id, + expect.objectContaining(context) ); }); expect(alertInstanceMock.scheduleActions).toHaveBeenCalledTimes(4); - expect(alertInstanceMock.scheduleActions).toBeCalledWith(TLS.id); }); it('handles dynamic settings for aging or expiration threshold', async () => { @@ -167,6 +180,22 @@ describe('tls alert', () => { }) ); }); + + it('sets alert recovery context for recovered alerts', async () => { + toISOStringSpy.mockImplementation(() => 'foo date string'); + const mockGetter: jest.Mock = jest.fn(); + + mockGetter.mockReturnValue(mockCertResult); + const { server, libs, plugins } = bootstrapDependencies({ getCerts: mockGetter }); + const alert = tlsAlertFactory(server, libs, plugins); + const options = mockOptions(); + // @ts-ignore the executor can return `void`, but ours never does + const state: Record<string, any> = await alert.executor(options); + expect(options.setContext).toHaveBeenCalledTimes(2); +
mockRecoveredAlerts.forEach((alertState) => { + expect(options.setContext).toHaveBeenCalledWith(alertState); + }); + }); }); describe('getCertSummary', () => { diff --git a/x-pack/plugins/synthetics/server/lib/alerts/tls.ts b/x-pack/plugins/synthetics/server/lib/alerts/tls.ts index 0a6fb24c88156..127171eab0f4d 100644 --- a/x-pack/plugins/synthetics/server/lib/alerts/tls.ts +++ b/x-pack/plugins/synthetics/server/lib/alerts/tls.ts @@ -9,7 +9,7 @@ import { schema } from '@kbn/config-schema'; import { ALERT_REASON } from '@kbn/rule-data-utils'; import { ActionGroupIdsOf } from '@kbn/alerting-plugin/common'; import { UptimeAlertTypeFactory } from './types'; -import { updateState, generateAlertMessage } from './common'; +import { updateState, generateAlertMessage, setRecoveredAlertsContext } from './common'; import { CLIENT_ALERT_TYPES, TLS } from '../../../common/constants/alerts'; import { DYNAMIC_SETTINGS_DEFAULTS } from '../../../common/constants'; import { Cert, CertResult } from '../../../common/runtime_types'; @@ -108,13 +108,14 @@ export const tlsAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> = (_server, }, ], actionVariables: { - context: [], + context: [...tlsTranslations.actionVariables, ...commonStateTranslations], state: [...tlsTranslations.actionVariables, ...commonStateTranslations], }, isExportable: true, minimumLicenseRequired: 'basic', + doesSetRecoveryContext: true, async executor({ - services: { alertWithLifecycle, savedObjectsClient, scopedClusterClient }, + services: { alertWithLifecycle, savedObjectsClient, scopedClusterClient, alertFactory }, state, }) { const dynamicSettings = await savedObjectsAdapter.getUptimeDynamicSettings(savedObjectsClient); @@ -173,10 +174,12 @@ export const tlsAlertFactory: UptimeAlertTypeFactory<ActionGroupIds> = (_server, ...updateState(state, foundCerts), ...summary, }); - alertInstance.scheduleActions(TLS.id); + alertInstance.scheduleActions(TLS.id, { ...summary }); }); } + setRecoveredAlertsContext(alertFactory); + return updateState(state, foundCerts); }, }); diff --git a/x-pack/test/functional_with_es_ssl/apps/uptime/simple_down_alert.ts b/x-pack/test/functional_with_es_ssl/apps/uptime/simple_down_alert.ts index 425ce5a55524d..963acca117881 100644 --- a/x-pack/test/functional_with_es_ssl/apps/uptime/simple_down_alert.ts +++ b/x-pack/test/functional_with_es_ssl/apps/uptime/simple_down_alert.ts @@ -107,7 +107,7 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => { group: 'xpack.uptime.alerts.actionGroups.monitorStatus', params: { message: - 'Monitor {{state.monitorName}} with url {{{state.monitorUrl}}} from {{state.observerLocation}} {{{state.statusMessage}}} The latest error message is {{{state.latestErrorMessage}}}', + 'Monitor {{context.monitorName}} with url {{{context.monitorUrl}}} from {{context.observerLocation}} {{{context.statusMessage}}} The latest error message is {{{context.latestErrorMessage}}}', }, id: 'my-slack1', },
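---

The pattern this patch applies to all three legacy Uptime rule types (monitor status, TLS, duration anomaly) can be summarized in a short sketch. The two helpers below mirror `generateAlertMessage` and `setRecoveredAlertsContext` from `x-pack/plugins/synthetics/server/lib/alerts/common.ts`; the inline structural type for `alertFactory` and the sample field values are illustrative stand-ins, not the plugin's actual types.

```ts
import Mustache from 'mustache';

// Render a message template with the same fields exposed under both `context`
// and `state`, so the new {{context.*}} placeholders work while any existing
// actions that still reference {{state.*}} keep rendering correctly.
const generateAlertMessage = (template: string, fields: Record<string, any>) =>
  Mustache.render(template, { context: { ...fields }, state: { ...fields } });

// For every alert that recovered during this rule execution, copy its last
// known state into its context, so that defaultRecoveryMessage templates can
// read fields like {{context.monitorName}}. In the plugin, `alertFactory` is
// the RuleExecutorServices['alertFactory'] destructured in each executor.
const setRecoveredAlertsContext = (alertFactory: {
  done: () => {
    getRecoveredAlerts: () => Array<{
      getState: () => Record<string, any>;
      setContext: (context: Record<string, any>) => void;
    }>;
  };
}) => {
  const { getRecoveredAlerts } = alertFactory.done();
  for (const alert of getRecoveredAlerts()) {
    alert.setContext(alert.getState());
  }
};

// Example rendering (hypothetical values):
generateAlertMessage(
  'Alert for monitor {{context.monitorName}} with url {{{context.monitorUrl}}} has recovered',
  { monitorName: 'BadSSL Expired', monitorUrl: 'https://expired.badssl.com/' }
);
// => 'Alert for monitor BadSSL Expired with url https://expired.badssl.com/ has recovered'
```

Because each executor now copies the fields it schedules actions with into context (and each rule declares `doesSetRecoveryContext: true`), the default action messages could migrate from `{{state.*}}` to `{{context.*}}` without breaking previously configured actions.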