Skip to content

feat(alerts): Emit alert.sent analytic on incidents #55479

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into the base branch
Sep 5, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/sentry/analytics/events/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from .alert_created import * # noqa: F401,F403
from .alert_edited import * # noqa: F401,F403
from .alert_rule_ui_component_webhook_sent import * # noqa: F401,F403
from .alert_sent import * # noqa: F401,F403
from .api_token_created import * # noqa: F401,F403
from .api_token_deleted import * # noqa: F401,F403
from .artifactbundle_assemble import * # noqa: F401,F403
Expand Down
22 changes: 22 additions & 0 deletions src/sentry/analytics/events/alert_sent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from sentry import analytics


class AlertSentEvent(analytics.Event):
    """Analytic event recorded each time an alert notification is delivered.

    Registered under the ``alert.sent`` type; emitted by both issue-alert and
    metric-alert (incident) notification paths.
    """

    type = "alert.sent"

    attributes = (
        analytics.Attribute("organization_id"),
        analytics.Attribute("project_id"),
        # The id of the Alert or AlertRule
        analytics.Attribute("alert_id"),
        # "issue_alert" or "metric_alert"
        analytics.Attribute("alert_type"),
        # Slack, msteams, email, etc.
        analytics.Attribute("provider"),
        # User_id if sent via email, channel id if sent via slack, etc.
        # Stored as str; callers are expected to stringify before recording.
        analytics.Attribute("external_id", type=str, required=False),
        # Correlates the analytic with a specific notification delivery.
        analytics.Attribute("notification_uuid", required=False),
    )


# Make the event available to analytics.record("alert.sent", ...).
analytics.register(AlertSentEvent)
50 changes: 44 additions & 6 deletions src/sentry/incidents/action_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from django.template.defaultfilters import pluralize
from django.urls import reverse

from sentry import features
from sentry import analytics, features
from sentry.charts.types import ChartSize
from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS
from sentry.incidents.charts import build_metric_alert_chart
Expand All @@ -33,6 +33,7 @@

class ActionHandler(metaclass=abc.ABCMeta):
status_display = {TriggerStatus.ACTIVE: "Fired", TriggerStatus.RESOLVED: "Resolved"}
provider: str

def __init__(self, action, incident, project):
self.action = action
Expand All @@ -57,6 +58,20 @@ def resolve(
):
pass

def record_alert_sent_analytics(
    self, external_id: int | str | None = None, notification_uuid: str | None = None
):
    """Record an ``alert.sent`` analytic for this handler's notification.

    ``external_id`` identifies the delivery target (user id for email,
    channel id for chat providers, etc.); ``notification_uuid`` ties the
    analytic to a specific delivery attempt. Missing values are recorded
    as empty strings rather than ``None``.
    """
    payload = {
        "organization_id": self.incident.organization_id,
        "project_id": self.project.id,
        # `provider` is declared per-subclass (email, slack, msteams, ...).
        "provider": self.provider,
        "alert_id": self.incident.alert_rule_id,
        "alert_type": "metric_alert",
        "external_id": "" if external_id is None else str(external_id),
        "notification_uuid": "" if notification_uuid is None else notification_uuid,
    }
    analytics.record("alert.sent", **payload)


class DefaultActionHandler(ActionHandler):
def fire(
Expand Down Expand Up @@ -93,6 +108,8 @@ def send_alert(
[AlertRuleTriggerAction.TargetType.USER, AlertRuleTriggerAction.TargetType.TEAM],
)
class EmailActionHandler(ActionHandler):
provider = "email"

def _get_targets(self) -> Set[int]:
target = self.action.target
if not target:
Expand Down Expand Up @@ -160,6 +177,7 @@ def email_users(
notification_uuid,
)
self.build_message(email_context, trigger_status, user_id).send_async(to=[email])
self.record_alert_sent_analytics(user_id, notification_uuid)

def build_message(self, context, status, user_id) -> MessageBuilder:
display = self.status_display[status]
Expand All @@ -182,6 +200,8 @@ def build_message(self, context, status, user_id) -> MessageBuilder:
integration_provider="slack",
)
class SlackActionHandler(DefaultActionHandler):
provider = "slack"

def send_alert(
self,
metric_value: int | float,
Expand All @@ -190,9 +210,11 @@ def send_alert(
):
from sentry.integrations.slack.utils import send_incident_alert_notification

send_incident_alert_notification(
success = send_incident_alert_notification(
self.action, self.incident, metric_value, new_status, notification_uuid
)
if success:
self.record_alert_sent_analytics(self.action.target_identifier, notification_uuid)


@AlertRuleTriggerAction.register_type(
Expand All @@ -202,6 +224,8 @@ def send_alert(
integration_provider="msteams",
)
class MsTeamsActionHandler(DefaultActionHandler):
provider = "msteams"

def send_alert(
self,
metric_value: int | float,
Expand All @@ -210,9 +234,11 @@ def send_alert(
):
from sentry.integrations.msteams.utils import send_incident_alert_notification

send_incident_alert_notification(
success = send_incident_alert_notification(
self.action, self.incident, metric_value, new_status, notification_uuid
)
if success:
self.record_alert_sent_analytics(self.action.target_identifier, notification_uuid)


@AlertRuleTriggerAction.register_type(
Expand All @@ -222,6 +248,8 @@ def send_alert(
integration_provider="pagerduty",
)
class PagerDutyActionHandler(DefaultActionHandler):
provider = "pagerduty"

def send_alert(
self,
metric_value: int | float,
Expand All @@ -230,9 +258,11 @@ def send_alert(
):
from sentry.integrations.pagerduty.utils import send_incident_alert_notification

send_incident_alert_notification(
success = send_incident_alert_notification(
self.action, self.incident, metric_value, new_status, notification_uuid
)
if success:
self.record_alert_sent_analytics(self.action.target_identifier, notification_uuid)


@AlertRuleTriggerAction.register_type(
Expand All @@ -242,6 +272,8 @@ def send_alert(
integration_provider="opsgenie",
)
class OpsgenieActionHandler(DefaultActionHandler):
provider = "opsgenie"

def send_alert(
self,
metric_value: int | float,
Expand All @@ -250,9 +282,11 @@ def send_alert(
):
from sentry.integrations.opsgenie.utils import send_incident_alert_notification

send_incident_alert_notification(
success = send_incident_alert_notification(
self.action, self.incident, metric_value, new_status, notification_uuid
)
if success:
self.record_alert_sent_analytics(self.action.target_identifier, notification_uuid)


@AlertRuleTriggerAction.register_type(
Expand All @@ -261,6 +295,8 @@ def send_alert(
[AlertRuleTriggerAction.TargetType.SENTRY_APP],
)
class SentryAppActionHandler(DefaultActionHandler):
provider = "sentry_app"

def send_alert(
self,
metric_value: int | float,
Expand All @@ -269,9 +305,11 @@ def send_alert(
):
from sentry.rules.actions.notify_event_service import send_incident_alert_notification

send_incident_alert_notification(
success = send_incident_alert_notification(
self.action, self.incident, new_status, metric_value, notification_uuid
)
if success:
self.record_alert_sent_analytics(self.action.sentry_app_id, notification_uuid)


def format_duration(minutes):
Expand Down
5 changes: 3 additions & 2 deletions src/sentry/integrations/msteams/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,18 +102,19 @@ def send_incident_alert_notification(
metric_value: int | None,
new_status: IncidentStatus,
notification_uuid: str | None = None,
) -> None:
) -> bool:
from .card_builder import build_incident_attachment

if action.target_identifier is None:
raise ValueError("Can't send without `target_identifier`")

attachment = build_incident_attachment(incident, new_status, metric_value, notification_uuid)
integration_service.send_msteams_incident_alert_notification(
success = integration_service.send_msteams_incident_alert_notification(
integration_id=action.integration_id,
channel=action.target_identifier,
attachment=attachment,
)
return success


def get_preinstall_client(service_url):
Expand Down
7 changes: 4 additions & 3 deletions src/sentry/integrations/opsgenie/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,19 +62,19 @@ def send_incident_alert_notification(
metric_value: int,
new_status: IncidentStatus,
notification_uuid: str | None = None,
) -> None:
) -> bool:
integration, org_integration = integration_service.get_organization_context(
organization_id=incident.organization_id, integration_id=action.integration_id
)
if org_integration is None or integration is None or integration.status != ObjectStatus.ACTIVE:
logger.info("Opsgenie integration removed, but the rule is still active.")
return
return False

team = get_team(org_integration=org_integration, team_id=action.target_identifier)
if not team:
# team removed, but the rule is still active
logger.info("Opsgenie team removed, but the rule is still active.")
return
return False

integration_key = team["integration_key"]
client = OpsgenieClient(
Expand All @@ -85,6 +85,7 @@ def send_incident_alert_notification(
attachment = build_incident_attachment(incident, new_status, metric_value, notification_uuid)
try:
client.send_notification(attachment)
return True
except ApiError as e:
logger.info(
"rule.fail.opsgenie_notification",
Expand Down
3 changes: 2 additions & 1 deletion src/sentry/integrations/pagerduty/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def send_incident_alert_notification(
metric_value: int,
new_status: IncidentStatus,
notification_uuid: str | None = None,
) -> None:
) -> bool:
integration_id = action.integration_id
organization_id = incident.organization_id

Expand Down Expand Up @@ -107,6 +107,7 @@ def send_incident_alert_notification(
)
try:
client.send_trigger(attachment)
return True
except ApiError as e:
logger.info(
"rule.fail.pagerduty_metric_alert",
Expand Down
6 changes: 4 additions & 2 deletions src/sentry/integrations/slack/utils/notifications.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,14 +24,14 @@ def send_incident_alert_notification(
metric_value: int,
new_status: IncidentStatus,
notification_uuid: str | None = None,
) -> None:
) -> bool:
# Make sure organization integration is still active:
integration, org_integration = integration_service.get_organization_context(
organization_id=incident.organization_id, integration_id=action.integration_id
)
if org_integration is None or integration is None or integration.status != ObjectStatus.ACTIVE:
# Integration removed, but rule is still active.
return
return False

chart_url = None
if features.has("organizations:metric-alert-chartcuterie", incident.organization):
Expand Down Expand Up @@ -64,8 +64,10 @@ def send_incident_alert_notification(
client = SlackClient(integration_id=integration.id)
try:
client.post("/chat.postMessage", data=payload, timeout=5)
return True
except ApiError:
logger.info("rule.fail.slack_post", exc_info=True)
return False


def send_slack_response(
Expand Down
5 changes: 3 additions & 2 deletions src/sentry/rules/actions/notify_event_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def send_incident_alert_notification(
new_status: IncidentStatus,
metric_value: str | None = None,
notification_uuid: str | None = None,
) -> None:
) -> bool:
"""
When a metric alert is triggered, send incident data to the SentryApp's webhook.
:param action: The triggered `AlertRuleTriggerAction`.
Expand All @@ -70,7 +70,7 @@ def send_incident_alert_notification(
incident, new_status, metric_value, notification_uuid
)

integration_service.send_incident_alert_notification(
success = integration_service.send_incident_alert_notification(
sentry_app_id=action.sentry_app_id,
action_id=action.id,
incident_id=incident.id,
Expand All @@ -79,6 +79,7 @@ def send_incident_alert_notification(
incident_attachment_json=json.dumps(incident_attachment),
metric_value=metric_value,
)
return success


def find_alert_rule_action_ui_component(app_platform_event: AppPlatformEvent) -> bool:
Expand Down
9 changes: 6 additions & 3 deletions src/sentry/services/hybrid_cloud/integration/impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -348,7 +348,7 @@ def send_incident_alert_notification(
incident_attachment_json: str,
metric_value: Optional[str] = None,
notification_uuid: str | None = None,
) -> None:
) -> bool:
sentry_app = SentryApp.objects.get(id=sentry_app_id)

metrics.incr("notifications.sent", instance=sentry_app.slug, skip_internal=False)
Expand All @@ -370,7 +370,7 @@ def send_incident_alert_notification(
},
exc_info=True,
)
return None
return False

app_platform_event = AppPlatformEvent(
resource="metric_alert",
Expand All @@ -395,16 +395,19 @@ def send_incident_alert_notification(
sentry_app_id=sentry_app.id,
event=f"{app_platform_event.resource}.{app_platform_event.action}",
)
return alert_rule_action_ui_component

def send_msteams_incident_alert_notification(
    self, *, integration_id: int, channel: str, attachment: Dict[str, Any]
) -> bool:
    """Post a metric-alert card to an MS Teams channel.

    Returns True when the card was sent, False when the Teams API call
    failed (the failure is logged, not raised).
    """
    client = MsTeamsClient(Integration.objects.get(id=integration_id))
    try:
        client.send_card(channel, attachment)
    except ApiError:
        logger.info("rule.fail.msteams_post", exc_info=True)
        return False
    return True

def delete_integration(self, *, integration_id: int) -> None:
integration = Integration.objects.filter(id=integration_id).first()
Expand Down
4 changes: 2 additions & 2 deletions src/sentry/services/hybrid_cloud/integration/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -264,14 +264,14 @@ def send_incident_alert_notification(
incident_attachment_json: str,
metric_value: Optional[str] = None,
notification_uuid: Optional[str] = None,
) -> None:
) -> bool:
pass

@rpc_method
@abstractmethod
def send_msteams_incident_alert_notification(
self, *, integration_id: int, channel: str, attachment: Dict[str, Any]
) -> None:
) -> bool:
raise NotImplementedError

@rpc_method
Expand Down
14 changes: 14 additions & 0 deletions tests/sentry/incidents/action_handlers/test_email.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,20 @@ def test_fire_metric_alert(self):
def test_resolve_metric_alert(self):
self.run_fire_test("resolve")

@patch("sentry.analytics.record")
def test_alert_sent_recorded(self, mock_record):
    """Firing the email action handler emits an ``alert.sent`` analytic."""
    self.run_fire_test()
    expected = dict(
        organization_id=self.organization.id,
        project_id=self.project.id,
        provider="email",
        alert_id=self.alert_rule.id,
        alert_type="metric_alert",
        external_id=str(self.user.id),
        notification_uuid="",
    )
    mock_record.assert_called_with("alert.sent", **expected)


class EmailActionHandlerGetTargetsTest(TestCase):
@cached_property
Expand Down
Loading