diff --git a/src/sentry/analytics/events/__init__.py b/src/sentry/analytics/events/__init__.py index b059eb52c36966..ecba52dceba6ad 100644 --- a/src/sentry/analytics/events/__init__.py +++ b/src/sentry/analytics/events/__init__.py @@ -2,6 +2,7 @@ from .alert_created import * # noqa: F401,F403 from .alert_edited import * # noqa: F401,F403 from .alert_rule_ui_component_webhook_sent import * # noqa: F401,F403 +from .alert_sent import * # noqa: F401,F403 from .api_token_created import * # noqa: F401,F403 from .api_token_deleted import * # noqa: F401,F403 from .artifactbundle_assemble import * # noqa: F401,F403 diff --git a/src/sentry/analytics/events/alert_sent.py b/src/sentry/analytics/events/alert_sent.py new file mode 100644 index 00000000000000..ccc0b33c6378eb --- /dev/null +++ b/src/sentry/analytics/events/alert_sent.py @@ -0,0 +1,22 @@ +from sentry import analytics + + +class AlertSentEvent(analytics.Event): + type = "alert.sent" + + attributes = ( + analytics.Attribute("organization_id"), + analytics.Attribute("project_id"), + # The id of the Alert or AlertRule + analytics.Attribute("alert_id"), + # "issue_alert" or "metric_alert" + analytics.Attribute("alert_type"), + # Slack, msteams, email, etc. + analytics.Attribute("provider"), + # User ID if sent via email, channel ID if sent via slack, etc.
+ analytics.Attribute("external_id", type=str, required=False), + analytics.Attribute("notification_uuid", required=False), + ) + + +analytics.register(AlertSentEvent) diff --git a/src/sentry/incidents/action_handlers.py b/src/sentry/incidents/action_handlers.py index 257abbe1bbad40..f0a3469903b0f2 100644 --- a/src/sentry/incidents/action_handlers.py +++ b/src/sentry/incidents/action_handlers.py @@ -9,7 +9,7 @@ from django.template.defaultfilters import pluralize from django.urls import reverse -from sentry import features +from sentry import analytics, features from sentry.charts.types import ChartSize from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS from sentry.incidents.charts import build_metric_alert_chart @@ -33,6 +33,7 @@ class ActionHandler(metaclass=abc.ABCMeta): status_display = {TriggerStatus.ACTIVE: "Fired", TriggerStatus.RESOLVED: "Resolved"} + provider: str def __init__(self, action, incident, project): self.action = action @@ -57,6 +58,20 @@ def resolve( ): pass + def record_alert_sent_analytics( + self, external_id: int | str | None = None, notification_uuid: str | None = None + ): + analytics.record( + "alert.sent", + organization_id=self.incident.organization_id, + project_id=self.project.id, + provider=self.provider, + alert_id=self.incident.alert_rule_id, + alert_type="metric_alert", + external_id=str(external_id) if external_id is not None else "", + notification_uuid=notification_uuid or "", + ) + class DefaultActionHandler(ActionHandler): def fire( @@ -93,6 +108,8 @@ def send_alert( [AlertRuleTriggerAction.TargetType.USER, AlertRuleTriggerAction.TargetType.TEAM], ) class EmailActionHandler(ActionHandler): + provider = "email" + def _get_targets(self) -> Set[int]: target = self.action.target if not target: @@ -160,6 +177,7 @@ def email_users( notification_uuid, ) self.build_message(email_context, trigger_status, user_id).send_async(to=[email]) + self.record_alert_sent_analytics(user_id, notification_uuid) def build_message(self, 
context, status, user_id) -> MessageBuilder: display = self.status_display[status] @@ -182,6 +200,8 @@ def build_message(self, context, status, user_id) -> MessageBuilder: integration_provider="slack", ) class SlackActionHandler(DefaultActionHandler): + provider = "slack" + def send_alert( self, metric_value: int | float, @@ -190,9 +210,11 @@ def send_alert( ): from sentry.integrations.slack.utils import send_incident_alert_notification - send_incident_alert_notification( + success = send_incident_alert_notification( self.action, self.incident, metric_value, new_status, notification_uuid ) + if success: + self.record_alert_sent_analytics(self.action.target_identifier, notification_uuid) @AlertRuleTriggerAction.register_type( @@ -202,6 +224,8 @@ def send_alert( integration_provider="msteams", ) class MsTeamsActionHandler(DefaultActionHandler): + provider = "msteams" + def send_alert( self, metric_value: int | float, @@ -210,9 +234,11 @@ def send_alert( ): from sentry.integrations.msteams.utils import send_incident_alert_notification - send_incident_alert_notification( + success = send_incident_alert_notification( self.action, self.incident, metric_value, new_status, notification_uuid ) + if success: + self.record_alert_sent_analytics(self.action.target_identifier, notification_uuid) @AlertRuleTriggerAction.register_type( @@ -222,6 +248,8 @@ def send_alert( integration_provider="pagerduty", ) class PagerDutyActionHandler(DefaultActionHandler): + provider = "pagerduty" + def send_alert( self, metric_value: int | float, @@ -230,9 +258,11 @@ def send_alert( ): from sentry.integrations.pagerduty.utils import send_incident_alert_notification - send_incident_alert_notification( + success = send_incident_alert_notification( self.action, self.incident, metric_value, new_status, notification_uuid ) + if success: + self.record_alert_sent_analytics(self.action.target_identifier, notification_uuid) @AlertRuleTriggerAction.register_type( @@ -242,6 +272,8 @@ def send_alert( 
integration_provider="opsgenie", ) class OpsgenieActionHandler(DefaultActionHandler): + provider = "opsgenie" + def send_alert( self, metric_value: int | float, @@ -250,9 +282,11 @@ def send_alert( ): from sentry.integrations.opsgenie.utils import send_incident_alert_notification - send_incident_alert_notification( + success = send_incident_alert_notification( self.action, self.incident, metric_value, new_status, notification_uuid ) + if success: + self.record_alert_sent_analytics(self.action.target_identifier, notification_uuid) @AlertRuleTriggerAction.register_type( @@ -261,6 +295,8 @@ def send_alert( [AlertRuleTriggerAction.TargetType.SENTRY_APP], ) class SentryAppActionHandler(DefaultActionHandler): + provider = "sentry_app" + def send_alert( self, metric_value: int | float, @@ -269,9 +305,11 @@ def send_alert( ): from sentry.rules.actions.notify_event_service import send_incident_alert_notification - send_incident_alert_notification( + success = send_incident_alert_notification( self.action, self.incident, new_status, metric_value, notification_uuid ) + if success: + self.record_alert_sent_analytics(self.action.sentry_app_id, notification_uuid) def format_duration(minutes): diff --git a/src/sentry/integrations/msteams/utils.py b/src/sentry/integrations/msteams/utils.py index 56adc7d4a45eb5..fde0047c752df0 100644 --- a/src/sentry/integrations/msteams/utils.py +++ b/src/sentry/integrations/msteams/utils.py @@ -102,18 +102,19 @@ def send_incident_alert_notification( metric_value: int | None, new_status: IncidentStatus, notification_uuid: str | None = None, -) -> None: +) -> bool: from .card_builder import build_incident_attachment if action.target_identifier is None: raise ValueError("Can't send without `target_identifier`") attachment = build_incident_attachment(incident, new_status, metric_value, notification_uuid) - integration_service.send_msteams_incident_alert_notification( + success = integration_service.send_msteams_incident_alert_notification( 
integration_id=action.integration_id, channel=action.target_identifier, attachment=attachment, ) + return success def get_preinstall_client(service_url): diff --git a/src/sentry/integrations/opsgenie/utils.py b/src/sentry/integrations/opsgenie/utils.py index f66315e0aff714..00527d1790e18c 100644 --- a/src/sentry/integrations/opsgenie/utils.py +++ b/src/sentry/integrations/opsgenie/utils.py @@ -62,19 +62,19 @@ def send_incident_alert_notification( metric_value: int, new_status: IncidentStatus, notification_uuid: str | None = None, -) -> None: +) -> bool: integration, org_integration = integration_service.get_organization_context( organization_id=incident.organization_id, integration_id=action.integration_id ) if org_integration is None or integration is None or integration.status != ObjectStatus.ACTIVE: logger.info("Opsgenie integration removed, but the rule is still active.") - return + return False team = get_team(org_integration=org_integration, team_id=action.target_identifier) if not team: # team removed, but the rule is still active logger.info("Opsgenie team removed, but the rule is still active.") - return + return False integration_key = team["integration_key"] client = OpsgenieClient( @@ -85,6 +85,7 @@ def send_incident_alert_notification( attachment = build_incident_attachment(incident, new_status, metric_value, notification_uuid) try: client.send_notification(attachment) + return True except ApiError as e: logger.info( "rule.fail.opsgenie_notification", diff --git a/src/sentry/integrations/pagerduty/utils.py b/src/sentry/integrations/pagerduty/utils.py index 43d129f391de1b..8ec6e1bdf30b0c 100644 --- a/src/sentry/integrations/pagerduty/utils.py +++ b/src/sentry/integrations/pagerduty/utils.py @@ -62,7 +62,7 @@ def send_incident_alert_notification( metric_value: int, new_status: IncidentStatus, notification_uuid: str | None = None, -) -> None: +) -> bool: integration_id = action.integration_id organization_id = incident.organization_id @@ -107,6 +107,7 @@ 
def send_incident_alert_notification( ) try: client.send_trigger(attachment) + return True except ApiError as e: logger.info( "rule.fail.pagerduty_metric_alert", diff --git a/src/sentry/integrations/slack/utils/notifications.py b/src/sentry/integrations/slack/utils/notifications.py index 5f2806fb755b01..fe4a6292b518f4 100644 --- a/src/sentry/integrations/slack/utils/notifications.py +++ b/src/sentry/integrations/slack/utils/notifications.py @@ -24,14 +24,14 @@ def send_incident_alert_notification( metric_value: int, new_status: IncidentStatus, notification_uuid: str | None = None, -) -> None: +) -> bool: # Make sure organization integration is still active: integration, org_integration = integration_service.get_organization_context( organization_id=incident.organization_id, integration_id=action.integration_id ) if org_integration is None or integration is None or integration.status != ObjectStatus.ACTIVE: # Integration removed, but rule is still active. - return + return False chart_url = None if features.has("organizations:metric-alert-chartcuterie", incident.organization): @@ -64,8 +64,10 @@ def send_incident_alert_notification( client = SlackClient(integration_id=integration.id) try: client.post("/chat.postMessage", data=payload, timeout=5) + return True except ApiError: logger.info("rule.fail.slack_post", exc_info=True) + return False def send_slack_response( diff --git a/src/sentry/rules/actions/notify_event_service.py b/src/sentry/rules/actions/notify_event_service.py index 2c2986787babdf..26fa7dde401a7a 100644 --- a/src/sentry/rules/actions/notify_event_service.py +++ b/src/sentry/rules/actions/notify_event_service.py @@ -56,7 +56,7 @@ def send_incident_alert_notification( new_status: IncidentStatus, metric_value: str | None = None, notification_uuid: str | None = None, -) -> None: +) -> bool: """ When a metric alert is triggered, send incident data to the SentryApp's webhook. :param action: The triggered `AlertRuleTriggerAction`. 
@@ -70,7 +70,7 @@ def send_incident_alert_notification( incident, new_status, metric_value, notification_uuid ) - integration_service.send_incident_alert_notification( + success = integration_service.send_incident_alert_notification( sentry_app_id=action.sentry_app_id, action_id=action.id, incident_id=incident.id, @@ -79,6 +79,7 @@ def send_incident_alert_notification( incident_attachment_json=json.dumps(incident_attachment), metric_value=metric_value, ) + return success def find_alert_rule_action_ui_component(app_platform_event: AppPlatformEvent) -> bool: diff --git a/src/sentry/services/hybrid_cloud/integration/impl.py b/src/sentry/services/hybrid_cloud/integration/impl.py index 43b9f8eb0c6d40..059c7aa7d3d9c3 100644 --- a/src/sentry/services/hybrid_cloud/integration/impl.py +++ b/src/sentry/services/hybrid_cloud/integration/impl.py @@ -348,7 +348,7 @@ def send_incident_alert_notification( incident_attachment_json: str, metric_value: Optional[str] = None, notification_uuid: str | None = None, - ) -> None: + ) -> bool: sentry_app = SentryApp.objects.get(id=sentry_app_id) metrics.incr("notifications.sent", instance=sentry_app.slug, skip_internal=False) @@ -370,7 +370,7 @@ def send_incident_alert_notification( }, exc_info=True, ) - return None + return False app_platform_event = AppPlatformEvent( resource="metric_alert", @@ -395,16 +395,19 @@ def send_incident_alert_notification( sentry_app_id=sentry_app.id, event=f"{app_platform_event.resource}.{app_platform_event.action}", ) + return alert_rule_action_ui_component def send_msteams_incident_alert_notification( self, *, integration_id: int, channel: str, attachment: Dict[str, Any] - ) -> None: + ) -> bool: integration = Integration.objects.get(id=integration_id) client = MsTeamsClient(integration) try: client.send_card(channel, attachment) + return True except ApiError: logger.info("rule.fail.msteams_post", exc_info=True) + return False def delete_integration(self, *, integration_id: int) -> None: integration = 
Integration.objects.filter(id=integration_id).first() diff --git a/src/sentry/services/hybrid_cloud/integration/service.py b/src/sentry/services/hybrid_cloud/integration/service.py index a4318059b654c1..48aee9862ec27e 100644 --- a/src/sentry/services/hybrid_cloud/integration/service.py +++ b/src/sentry/services/hybrid_cloud/integration/service.py @@ -264,14 +264,14 @@ def send_incident_alert_notification( incident_attachment_json: str, metric_value: Optional[str] = None, notification_uuid: Optional[str] = None, - ) -> None: + ) -> bool: pass @rpc_method @abstractmethod def send_msteams_incident_alert_notification( self, *, integration_id: int, channel: str, attachment: Dict[str, Any] - ) -> None: + ) -> bool: raise NotImplementedError @rpc_method diff --git a/tests/sentry/incidents/action_handlers/test_email.py b/tests/sentry/incidents/action_handlers/test_email.py index 90495dc98a5cff..0656c6c8163fe8 100644 --- a/tests/sentry/incidents/action_handlers/test_email.py +++ b/tests/sentry/incidents/action_handlers/test_email.py @@ -60,6 +60,20 @@ def test_fire_metric_alert(self): def test_resolve_metric_alert(self): self.run_fire_test("resolve") + @patch("sentry.analytics.record") + def test_alert_sent_recorded(self, mock_record): + self.run_fire_test() + mock_record.assert_called_with( + "alert.sent", + organization_id=self.organization.id, + project_id=self.project.id, + provider="email", + alert_id=self.alert_rule.id, + alert_type="metric_alert", + external_id=str(self.user.id), + notification_uuid="", + ) + class EmailActionHandlerGetTargetsTest(TestCase): @cached_property diff --git a/tests/sentry/incidents/action_handlers/test_msteams.py b/tests/sentry/incidents/action_handlers/test_msteams.py index 36a2d8b7d16026..268214da18fae9 100644 --- a/tests/sentry/incidents/action_handlers/test_msteams.py +++ b/tests/sentry/incidents/action_handlers/test_msteams.py @@ -1,4 +1,5 @@ import time +from unittest.mock import patch import responses from freezegun import 
freeze_time @@ -75,6 +76,20 @@ def test_fire_metric_alert(self): def test_resolve_metric_alert(self): self.run_fire_test("resolve") + @patch("sentry.analytics.record") + def test_alert_sent_recorded(self, mock_record): + self.run_fire_test() + mock_record.assert_called_with( + "alert.sent", + organization_id=self.organization.id, + project_id=self.project.id, + provider="msteams", + alert_id=self.alert_rule.id, + alert_type="metric_alert", + external_id=str(self.action.target_identifier), + notification_uuid="", + ) + @responses.activate def test_rule_snoozed(self): alert_rule = self.create_alert_rule() diff --git a/tests/sentry/incidents/action_handlers/test_opsgenie.py b/tests/sentry/incidents/action_handlers/test_opsgenie.py index 3156fb68e03ff7..b48037e7c3bf5d 100644 --- a/tests/sentry/incidents/action_handlers/test_opsgenie.py +++ b/tests/sentry/incidents/action_handlers/test_opsgenie.py @@ -198,3 +198,17 @@ def test_missing_team(self, mock_logger): mock_logger.info.call_args.args[0] == "Opsgenie team removed, but the rule is still active." 
) + + @patch("sentry.analytics.record") + def test_alert_sent_recorded(self, mock_record): + self.run_fire_test() + mock_record.assert_called_with( + "alert.sent", + organization_id=self.organization.id, + project_id=self.project.id, + provider="opsgenie", + alert_id=self.alert_rule.id, + alert_type="metric_alert", + external_id=str(self.action.target_identifier), + notification_uuid="", + ) diff --git a/tests/sentry/incidents/action_handlers/test_pagerduty.py b/tests/sentry/incidents/action_handlers/test_pagerduty.py index ceeea356e72f47..beec4d734805e7 100644 --- a/tests/sentry/incidents/action_handlers/test_pagerduty.py +++ b/tests/sentry/incidents/action_handlers/test_pagerduty.py @@ -1,4 +1,5 @@ import uuid +from unittest.mock import patch import responses from freezegun import freeze_time @@ -144,3 +145,17 @@ def test_rule_snoozed(self): handler.fire(metric_value, IncidentStatus(incident.status)) assert len(responses.calls) == 0 + + @patch("sentry.analytics.record") + def test_alert_sent_recorded(self, mock_record): + self.run_fire_test() + mock_record.assert_called_with( + "alert.sent", + organization_id=self.organization.id, + project_id=self.project.id, + provider="pagerduty", + alert_id=self.alert_rule.id, + alert_type="metric_alert", + external_id=str(self.action.target_identifier), + notification_uuid="", + ) diff --git a/tests/sentry/incidents/action_handlers/test_sentry_app.py b/tests/sentry/incidents/action_handlers/test_sentry_app.py index 0fa62301115dd7..1117944f5e75e8 100644 --- a/tests/sentry/incidents/action_handlers/test_sentry_app.py +++ b/tests/sentry/incidents/action_handlers/test_sentry_app.py @@ -1,3 +1,5 @@ +from unittest.mock import patch + import responses from freezegun import freeze_time @@ -106,7 +108,7 @@ def run_test(self, incident, method): from sentry.rules.actions.notify_event_service import build_incident_attachment trigger = self.create_alert_rule_trigger(self.alert_rule, "hi", 1000) - action = 
self.create_alert_rule_trigger_action( + self.action = self.create_alert_rule_trigger_action( alert_rule_trigger=trigger, target_identifier=self.sentry_app.id, type=AlertRuleTriggerAction.Type.SENTRY_APP, @@ -129,7 +131,7 @@ def run_test(self, incident, method): body=json.dumps({"ok": "true"}), ) - handler = SentryAppActionHandler(action, incident, self.project) + handler = SentryAppActionHandler(self.action, incident, self.project) metric_value = 1000 with self.tasks(): getattr(handler, method)(metric_value, IncidentStatus(incident.status)) @@ -156,3 +158,17 @@ def test_fire_metric_alert(self): def test_resolve_metric_alert(self): self.run_fire_test("resolve") + + @patch("sentry.analytics.record") + def test_alert_sent_recorded(self, mock_record): + self.run_fire_test() + mock_record.assert_called_with( + "alert.sent", + organization_id=self.organization.id, + project_id=self.project.id, + provider="sentry_app", + alert_id=self.alert_rule.id, + alert_type="metric_alert", + external_id=str(self.action.sentry_app_id), + notification_uuid="", + ) diff --git a/tests/sentry/incidents/action_handlers/test_slack.py b/tests/sentry/incidents/action_handlers/test_slack.py index d86be96989a8ec..5f4f5df16c0458 100644 --- a/tests/sentry/incidents/action_handlers/test_slack.py +++ b/tests/sentry/incidents/action_handlers/test_slack.py @@ -1,3 +1,4 @@ +from unittest.mock import patch from urllib.parse import parse_qs import responses @@ -150,3 +151,17 @@ def test_rule_snoozed_by_user_still_sends(self): handler.fire(metric_value, IncidentStatus(incident.status)) assert len(responses.calls) == 1 + + @patch("sentry.analytics.record") + def test_alert_sent_recorded(self, mock_record): + self.run_fire_test() + mock_record.assert_called_with( + "alert.sent", + organization_id=self.organization.id, + project_id=self.project.id, + provider="slack", + alert_id=self.alert_rule.id, + alert_type="metric_alert", + external_id=str(self.action.target_identifier), + notification_uuid="", + )