```diff
@@ -33,7 +33,6 @@
 from sentry.utils.snuba import bulk_snuba_queries
 
 BATCH_SIZE = 20
-SEER_BACKFILL_DELAY_PER_RECORD = 0.1
 BACKFILL_NAME = "backfill_grouping_records"
 LAST_PROCESSED_REDIS_KEY = "grouping_record_backfill.last_processed_id"
 
```
```diff
@@ -67,6 +66,14 @@ def backfill_seer_grouping_records(
     Task to backfill seer grouping_records table.
     Pass in last_processed_id = 0 if running project for the first time, else None
     """
+    logger.info(
+        "backfill_seer_grouping_records.start",
+        extra={
+            "project_id": project_id,
+            "last_processed_id": last_processed_id,
+            "dry_run": dry_run,
+        },
+    )
     project = Project.objects.get_from_cache(id=project_id)
     if not features.has("projects:similarity-embeddings-backfill", project):
         return
```
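For context on the new start log: keys passed via `extra` become attributes on the stdlib `LogRecord`, which is what lets a structured log pipeline filter backfill runs by project or resume point. A minimal stdlib-only sketch (the handler and logger names here are illustrative, not Sentry code):

```python
import logging

class ExtraEcho(logging.Handler):
    """Show that keys passed via ``extra`` land on the LogRecord itself."""

    def emit(self, record: logging.LogRecord) -> None:
        # ``project_id`` was attached by ``extra=...``, not by the formatter.
        print(record.getMessage(), {"project_id": getattr(record, "project_id", None)})

logger = logging.getLogger("backfill_demo")
logger.addHandler(ExtraEcho())
logger.setLevel(logging.INFO)

logger.info("backfill_seer_grouping_records.start", extra={"project_id": 42})
# -> backfill_seer_grouping_records.start {'project_id': 42}
```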
```diff
@@ -141,13 +148,14 @@ def backfill_seer_grouping_records(
             project, rows, group_id_message_batch, group_hashes_dict
         )
 
-        response = post_bulk_grouping_records(
-            CreateGroupingRecordsRequest(
-                group_id_list=group_id_batch,
-                data=data["data"],
-                stacktrace_list=data["stacktrace_list"],
+        with metrics.timer(f"{BACKFILL_NAME}.post_bulk_grouping_records", sample_rate=1.0):
+            response = post_bulk_grouping_records(
+                CreateGroupingRecordsRequest(
+                    group_id_list=group_id_batch,
+                    data=data["data"],
+                    stacktrace_list=data["stacktrace_list"],
+                )
             )
-        )
         if response["success"]:
             groups = Group.objects.filter(project_id=project.id, id__in=group_id_batch)
             for group in groups:
```
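The `metrics.timer` wrapper reports the round-trip time of each Seer POST under `backfill_grouping_records.post_bulk_grouping_records`, with `sample_rate=1.0` so every batch is recorded. A rough stand-in for the pattern, assuming nothing about Sentry's actual metrics backend:

```python
import time
from contextlib import contextmanager

@contextmanager
def timer(key: str, sample_rate: float = 1.0):
    # Sketch of the timing pattern: measure the wrapped block and emit one
    # timing metric per call. The real metrics.timer ships the value to a
    # metrics backend; this stand-in just prints it.
    start = time.monotonic()
    try:
        yield
    finally:
        elapsed = time.monotonic() - start
        print(f"timing {key}: {elapsed:.3f}s (sample_rate={sample_rate})")

with timer("backfill_grouping_records.post_bulk_grouping_records"):
    time.sleep(0.05)  # stands in for the bulk HTTP call to Seer
```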
```diff
@@ -174,7 +182,6 @@ def backfill_seer_grouping_records(
         )  # needed for typing
         backfill_seer_grouping_records.apply_async(
             args=[project.id, last_processed_id],
-            countdown=BATCH_SIZE * SEER_BACKFILL_DELAY_PER_RECORD,
         )
         return
 
```
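Dropping `countdown` here pairs with the removal of `SEER_BACKFILL_DELAY_PER_RECORD` above: each batch now re-enqueues its successor to run immediately rather than after `BATCH_SIZE * SEER_BACKFILL_DELAY_PER_RECORD = 20 * 0.1 = 2.0` seconds. A hedged sketch of the old throttling, using a toy Celery app rather than Sentry's task machinery:

```python
from celery import Celery

app = Celery("demo", broker="memory://")

@app.task
def next_batch(project_id: int, last_processed_id: int) -> None:
    """Stand-in for the backfill task re-scheduling itself."""

# Celery's ``countdown`` is an ETA offset in seconds. The removed throttle
# delayed each self-requeue by 20 * 0.1 = 2.0 s; without it, the follow-up
# batch is enqueued to run as soon as a worker picks it up.
next_batch.apply_async(args=[1, 0], countdown=2.0)
```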
```diff
@@ -204,15 +211,17 @@ def lookup_group_data_stacktrace_bulk_with_fallback(
                 "group_id": group_id,
                 "event_id": event_id,
             }
-            logger.info("tasks.backfill_seer_grouping_records.event_lookup_error", extra=extra)
+            logger.exception(
+                "tasks.backfill_seer_grouping_records.event_lookup_error", extra=extra
+            )
             continue
         except KeyError:
             extra = {
                 "organization_id": project.organization.id,
                 "project_id": project.id,
                 "group_id": group_id,
             }
-            logger.info("tasks.backfill_seer_grouping_records.no_group_hash", extra=extra)
+            logger.exception("tasks.backfill_seer_grouping_records.no_group_hash", extra=extra)
             continue
 
     return bulk_group_data_stacktraces
```
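The `logger.info` → `logger.exception` switch (here and in the two hunks below) is more than a level bump: called inside an `except` block, `exception()` logs at ERROR level and attaches the active traceback, so these lookup failures become actionable instead of silent. A stdlib-only demonstration:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("backfill_demo")

try:
    raise KeyError("missing group hash")
except KeyError:
    # Unlike logger.info, logger.exception records the current exception's
    # traceback on the log record automatically (and uses ERROR level).
    logger.exception(
        "tasks.backfill_seer_grouping_records.no_group_hash", extra={"group_id": 123}
    )
```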
```diff
@@ -249,7 +258,7 @@ def lookup_group_data_stacktrace_bulk(
             "group_data": json.dumps(rows),
             "error": e.message,
         }
-        logger.info(
+        logger.exception(
             "tasks.backfill_seer_grouping_records.bulk_event_lookup_exception",
             extra=extra,
         )
```
```diff
@@ -322,7 +331,7 @@ def lookup_group_data_stacktrace_single(
             "event_id": event_id,
             "error": e.message,
         }
-        logger.info(
+        logger.exception(
             "tasks.backfill_seer_grouping_records.event_lookup_exception", extra=extra
         )
 
```