
Commit fad3435

Merge branch 'master' into chloedisablenotifyslack
2 parents 3ca3e64 + d8a24af commit fad3435


118 files changed, +4506 −2465 lines changed


Diff for: bin/send_metrics.py

+169
@@ -0,0 +1,169 @@
# pylint: skip-file
# flake8: noqa

"""
Script that sends generic metrics messages to sentry locally


Overview

This script is designed to be used when creating a new use case ID for the first
time for the generic metrics platform.


Usage


python send_metrics.py

Without any command line argument, the script will send 3 metrics
(counter/set/distribution) for each use case ID registered in
src/sentry/sentry_metrics/use_case_id_registry.py.


python send_metrics.py hello world

The script will treat any arguments supplied as a use case ID, and send 3 metrics
(counter/set/distribution) for each use case ID specified.

"""

import datetime
import itertools
import json
import pprint
import random
import string
import sys

from arroyo.backends.kafka import KafkaPayload, KafkaProducer
from arroyo.types import Topic

from sentry.sentry_metrics.use_case_id_registry import UseCaseID

BOOTSTRAP_HOST = "127.0.0.1:9092"
TOPIC_NAME = "ingest-performance-metrics"

conf = {"bootstrap.servers": BOOTSTRAP_HOST}

make_counter_payload = lambda use_case, rand_str: {
    "name": f"c:{use_case}/{use_case}@none",
    "tags": {
        "environment": "production",
        "session.status": "init",
        f"gen_metric_e2e_{use_case}_counter_k_{rand_str}": f"gen_metric_e2e_{use_case}_counter_v_{rand_str}",
    },
    "timestamp": int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp()),
    "type": "c",
    "value": 1,
    "org_id": 1,
    "retention_days": 90,
    "project_id": 3,
}

make_dist_payload = lambda use_case, rand_str: {
    "name": f"d:{use_case}/duration@second",
    "tags": {
        "environment": "production",
        "session.status": "healthy",
        f"gen_metric_e2e_{use_case}_dist_k_{rand_str}": f"gen_metric_e2e_{use_case}_dist_v_{rand_str}",
    },
    "timestamp": int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp()),
    "type": "d",
    "value": [4, 5, 6],
    "org_id": 1,
    "retention_days": 90,
    "project_id": 3,
}

make_set_payload = lambda use_case, rand_str: {
    "name": f"s:{use_case}/error@none",
    "tags": {
        "environment": "production",
        "session.status": "errored",
        f"gen_metric_e2e_{use_case}_set_k_{rand_str}": f"gen_metric_e2e_{use_case}_set_v_{rand_str}",
    },
    "timestamp": int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp()),
    "type": "s",
    "value": [3],
    "org_id": 1,
    "retention_days": 90,
    "project_id": 3,
}

make_psql = (
    lambda rand_str: f"""
SELECT string,
       organization_id,
       date_added,
       use_case_id
FROM sentry_perfstringindexer
WHERE string ~ 'gen_metric_e2e_.*{rand_str}';
"""
)

make_csql = lambda rand_str: "UNION ALL".join(
    [
        f"""
SELECT use_case_id,
       org_id,
       project_id,
       metric_id,
       timestamp,
       tags.key,
       tags.raw_value
FROM {table_name}
WHERE arrayExists(v -> match(v, 'gen_metric_e2e_.*{rand_str}'), tags.raw_value)
"""
        for table_name in [
            "generic_metric_counters_raw_local",
            "generic_metric_distributions_raw_local",
            "generic_metric_sets_raw_local",
        ]
    ]
)


def produce_msgs(messages):
    producer = KafkaProducer(conf)
    for i, message in enumerate(messages):
        print(f"Sending message {i + 1} of {len(messages)}:")
        pprint.pprint(message)
        producer.produce(
            Topic(name=TOPIC_NAME),
            KafkaPayload(key=None, value=json.dumps(message).encode("utf-8"), headers=[]),
        )
        print("Done")
        print()

    producer.close()


if __name__ == "__main__":
    rand_str = "".join(random.choices(string.ascii_uppercase + string.digits, k=8))
    use_cases = (
        [use_case_id.value for use_case_id in UseCaseID if use_case_id is not UseCaseID.SESSIONS]
        if len(sys.argv) == 1
        else sys.argv[1:]
    )
    messages = list(
        itertools.chain.from_iterable(
            (
                make_counter_payload(use_case, rand_str),
                make_dist_payload(use_case, rand_str),
                make_set_payload(use_case, rand_str),
            )
            for use_case in use_cases
        )
    )
    random.shuffle(messages)

    produce_msgs(messages)
    print(
        f"Use the following SQL to verify postgres, there should be {(strs_per_use_case := 6)} strings for each use cases, {strs_per_use_case * len(use_cases)} in total."
    )
    print(make_psql(rand_str))
    print(
        f"Use the following SQL to verify clickhouse, there should be {(metrics_per_use_case := 3)} metrics for each use cases, {metrics_per_use_case * len(use_cases)} in total."
    )
    print(make_csql(rand_str))
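
For a concrete sense of what this script puts on the ingest topic, the sketch below shows what one counter payload evaluates to. The use case ID "my_use_case" and the suffix "ABC12345" are made up for illustration, and the timestamp is abbreviated.

# Illustration only -- hypothetical use case ID and random suffix.
payload = make_counter_payload("my_use_case", "ABC12345")
# payload == {
#     "name": "c:my_use_case/my_use_case@none",
#     "tags": {
#         "environment": "production",
#         "session.status": "init",
#         "gen_metric_e2e_my_use_case_counter_k_ABC12345": "gen_metric_e2e_my_use_case_counter_v_ABC12345",
#     },
#     "timestamp": <current UTC epoch seconds>,
#     "type": "c",
#     "value": 1,
#     "org_id": 1,
#     "retention_days": 90,
#     "project_id": 3,
# }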

Diff for: codecov.yml

+5 −3

@@ -51,20 +51,22 @@ flags:
       - "static/app/"
     carryforward: true
     # FE uploads 4 coverage reports. This property ensures codecov waits
-    # for all reports to be uploaded before creating a GitHub status check.
+    # for all reports to be uploaded before creating a GitHub status check.
+    # NOTE: If you change this, make sure to change `comment.after_n_builds` below as well.
     after_n_builds: 4
   backend:
     paths:
       - "src/sentry/**/*.py"
     carryforward: true
-    # Do not send any status checks until N coverage reports are uploaded
+    # Do not send any status checks until n coverage reports are uploaded.
+    # NOTE: If you change this, make sure to change `comment.after_n_builds` below as well.
     after_n_builds: 18

 # Read more here: https://docs.codecov.com/docs/pull-request-comments
 comment:
   # This is the addition of carry forward builds and fresh builds, thus, it's the addition
   # of the FE and BE builds
-  after_n_builds: 20
+  after_n_builds: 22
   layout: "diff, files"
   # Update, if comment exists. Otherwise post new.
   behavior: default
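
The bump from 20 to 22 follows from the two flag settings above: the frontend flag waits for 4 reports and the backend flag now waits for 18, and since the comment count is described as the sum of the FE and BE builds, 4 + 18 = 22.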

Diff for: migrations_lockfile.txt

+1 −1

@@ -6,5 +6,5 @@ To resolve this, rebase against latest master and regenerate your migration. Thi
 will then be regenerated, and you should be able to merge without conflicts.

 nodestore: 0002_nodestore_no_dictfield
-sentry: 0519_remove_repo_name_constraint
+sentry: 0521_migrate_world_map_widgets
 social_auth: 0002_default_auto_field

Diff for: src/sentry/api/endpoints/project_performance_issue_settings.py

+5

@@ -58,6 +58,7 @@ class ConfigurableThresholds(Enum):
     RENDER_BLOCKING_ASSET_FCP_RATIO = "render_blocking_fcp_ratio"
     SLOW_DB_QUERY_DURATION = "slow_db_query_duration_threshold"
     N_PLUS_API_CALLS_DURATION = "n_plus_one_api_calls_total_duration_threshold"
+    CONSECUTIVE_HTTP_SPANS_MIN_TIME_SAVED = "consecutive_http_spans_min_time_saved_threshold"


 internal_only_project_settings_to_group_map: Dict[str, Type[GroupType]] = {

@@ -84,6 +85,7 @@ class ConfigurableThresholds(Enum):
     ConfigurableThresholds.RENDER_BLOCKING_ASSET_FCP_RATIO.value: InternalProjectOptions.RENDER_BLOCKING_ASSET.value,
     ConfigurableThresholds.SLOW_DB_QUERY_DURATION.value: InternalProjectOptions.SLOW_DB_QUERY.value,
     ConfigurableThresholds.N_PLUS_API_CALLS_DURATION.value: InternalProjectOptions.N_PLUS_ONE_API_CALLS.value,
+    ConfigurableThresholds.CONSECUTIVE_HTTP_SPANS_MIN_TIME_SAVED.value: InternalProjectOptions.CONSECUTIVE_HTTP_SPANS.value,
 }


@@ -125,6 +127,9 @@ class ProjectPerformanceIssueSettingsSerializer(serializers.Serializer):
     n_plus_one_api_calls_total_duration_threshold = serializers.IntegerField(
         required=False, min_value=100, max_value=TEN_SECONDS  # ms
     )
+    consecutive_http_spans_min_time_saved_threshold = serializers.IntegerField(
+        required=False, min_value=1000, max_value=TEN_SECONDS  # ms
+    )
     uncompressed_assets_detection_enabled = serializers.BooleanField(required=False)
     consecutive_http_spans_detection_enabled = serializers.BooleanField(required=False)
     large_http_payload_detection_enabled = serializers.BooleanField(required=False)
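
To make the bounds on the new threshold concrete, here is a minimal, self-contained DRF sketch (not the actual endpoint code) that validates the same way as the field added above; TEN_SECONDS is assumed here to be 10000 ms.

from rest_framework import serializers

TEN_SECONDS = 10000  # ms -- assumed value of the constant used in the real serializer


class ConsecutiveHttpThresholdSketch(serializers.Serializer):
    consecutive_http_spans_min_time_saved_threshold = serializers.IntegerField(
        required=False, min_value=1000, max_value=TEN_SECONDS  # ms
    )


ConsecutiveHttpThresholdSketch(data={"consecutive_http_spans_min_time_saved_threshold": 500}).is_valid()   # False: below the 1000 ms floor
ConsecutiveHttpThresholdSketch(data={"consecutive_http_spans_min_time_saved_threshold": 2000}).is_valid()  # True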

Diff for: src/sentry/api/serializers/models/alert_rule.py

+1 −1

@@ -41,7 +41,7 @@ def get_attrs(self, item_list, user, **kwargs):

         sentry_app_installations_by_sentry_app_id = app_service.get_related_sentry_app_components(
             organization_ids=[alert_rule.organization_id for alert_rule in alert_rules.values()],
-            sentry_app_ids=trigger_actions.values_list("sentry_app_id", flat=True),
+            sentry_app_ids=list(trigger_actions.values_list("sentry_app_id", flat=True)),
             type="alert-rule-action",
         )

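A note on the one-line change, assuming the motivation is that app_service sits behind a service/RPC boundary that needs plain, serializable arguments: values_list(..., flat=True) returns a lazy QuerySet rather than a list, and list(...) forces it into concrete values.

# Sketch only; assumes trigger_actions is a Django QuerySet, as in the code above.
ids = trigger_actions.values_list("sentry_app_id", flat=True)
# type(ids) -> django.db.models.query.QuerySet (lazy; re-evaluated on iteration)
list(ids)  # e.g. [1, 2, 3] -- a plain list that can be passed across the boundary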

Diff for: src/sentry/api_gateway/proxy.py

+2 −2

@@ -53,9 +53,9 @@ def proxy_request(request: HttpRequest, org_slug: str) -> StreamingHttpResponse:

     try:
         region = get_region_for_organization(org_slug)
-    except RegionResolutionError:
+    except RegionResolutionError as e:
         logger.info("region_resolution_error", extra={"org_slug": org_slug})
-        raise NotFound
+        raise NotFound from e

     target_url = region.to_url(request.path)
     header_dict = clean_proxy_headers(request.headers)
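
The `from e` form is ordinary Python exception chaining: the original resolution error is kept on the new exception's __cause__, so tracebacks still show why the lookup failed. A generic, runnable sketch (not the proxy code itself):

class NotFound(Exception):  # stand-in for the NotFound raised above
    pass


try:
    try:
        raise LookupError("no region for org")  # stand-in for RegionResolutionError
    except LookupError as e:
        raise NotFound("organization not found") from e
except NotFound as err:
    assert isinstance(err.__cause__, LookupError)  # the original error is preserved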

Diff for: src/sentry/conf/server.py

+2 −2

@@ -1639,8 +1639,6 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
     "organizations:device-class-synthesis": False,
     # Enable the product selection feature in the getting started docs, regardless of the organization's strategy
     "organizations:getting-started-doc-with-product-selection": False,
-    # Enable the onboarding heartbeat footer on the sdk setup page
-    "organizations:onboarding-heartbeat-footer": False,
     # Enable a new behavior for deleting the freshly created project,
     # if the user clicks on the back button in the onboarding for new orgs
     "organizations:onboarding-project-deletion-on-back-click": False,

@@ -1684,6 +1682,8 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
     "projects:alert-filters": True,
     # Enable functionality to specify custom inbound filters on events.
     "projects:custom-inbound-filters": False,
+    # Enable the new flat file indexing system for sourcemaps.
+    "organizations:sourcemaps-bundle-flat-file-indexing": False,
     # Enable data forwarding functionality for projects.
     "projects:data-forwarding": True,
     # Enable functionality to discard groups.
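
For context on how a default like the new sourcemaps flag is typically consumed, here is a hedged sketch using Sentry's features helper; the call site below is illustrative and not part of this diff.

from sentry import features

# `organization` is assumed to be in scope at the hypothetical call site.
if features.has("organizations:sourcemaps-bundle-flat-file-indexing", organization):
    ...  # take the flat-file indexing path for sourcemaps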
