@@ -25,7 +25,6 @@
 from sentry.models.grouphash import GroupHash
 from sentry.seer.similarity.grouping_records import CreateGroupingRecordData
 from sentry.seer.similarity.types import RawSeerSimilarIssueData
-from sentry.seer.similarity.utils import MAX_FRAME_COUNT
 from sentry.snuba.dataset import Dataset
 from sentry.snuba.referrer import Referrer
 from sentry.tasks.embeddings_grouping.backfill_seer_grouping_records_for_project import (
@@ -366,69 +365,6 @@ def test_lookup_group_data_stacktrace_bulk_no_stacktrace_exception(self):
         assert bulk_group_data_stacktraces["data"] == expected_group_data
         assert bulk_group_data_stacktraces["stacktrace_list"] == expected_stacktraces
 
-    @patch("sentry.seer.similarity.utils.logger")
-    def test_lookup_group_data_stacktrace_bulk_too_many_system_frames(self, mock_logger):
-        """
-        Test that if a group has over MAX_FRAME_COUNT system-only frames, it is logged
-        and included in the results.
-        """
-        # Use 2 events
-        rows, events, hashes = self.bulk_rows[:2], self.bulk_events[:2], {}
-        group_ids = [row["group_id"] for row in rows]
-        for group_id in group_ids:
-            hashes.update({group_id: self.group_hashes[group_id]})
-        # Create one event whose stacktrace has over MAX_FRAME_COUNT system-only frames
-        exception: Any = copy.deepcopy(EXCEPTION)
-        exception["values"][0]["stacktrace"]["frames"] += [
-            {
-                "function": f"divide_by_zero_{i}",
-                "module": "__main__",
-                "filename": f"python_onboarding_{i}.py",
-                "abs_path": f"/Users/user/python_onboarding/python_onboarding_{i}.py",
-                "lineno": i,
-                "in_app": True,
-            }
-            for i in range(MAX_FRAME_COUNT + 1)
-        ]
-        event = self.store_event(
-            data={
-                "platform": "python",
-                "exception": exception,
-                "title": "title",
-                "timestamp": before_now(seconds=10).isoformat(),
-            },
-            project_id=self.project.id,
-            assert_no_errors=False,
-        )
-        rows.append({"event_id": event.event_id, "group_id": event.group_id})
-        group_hash = GroupHash.objects.filter(group_id=event.group.id).first()
-        assert group_hash
-        hashes.update({event.group_id: group_hash.hash})
-        events.append(event)
-
-        bulk_group_data_stacktraces, _ = get_events_from_nodestore(self.project, rows, group_ids)
-        expected_group_data = [
-            CreateGroupingRecordData(
-                group_id=event.group.id,
-                hash=hashes[event.group.id],
-                project_id=self.project.id,
-                exception_type=get_path(event.data, "exception", "values", -1, "type"),
-            )
-            for event in events
-        ]
-
-        assert bulk_group_data_stacktraces["data"] == expected_group_data
-        assert len(bulk_group_data_stacktraces["stacktrace_list"]) == len(events)
-
-        mock_logger.info.assert_called_with(
-            "grouping.similarity.over_threshold_system_only_frames",
-            extra={
-                "project_id": self.project.id,
-                "event_id": event.event_id,
-                "hash": group_hash.hash,
-            },
-        )
-
     def test_lookup_group_data_stacktrace_bulk_with_fallback_success(self):
         """Test successful bulk lookup with fallback, where the fallback isn't used"""
         rows, events, hashes = (
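For reference, the removed test triggered the over-limit logging path by appending more than MAX_FRAME_COUNT synthetic in-app frames to a copied exception fixture. Below is a minimal standalone sketch of that payload construction; the MAX_FRAME_COUNT value and the skeletal EXCEPTION fixture are stand-ins here, not the repo's actual constant (imported from sentry.seer.similarity.utils) or test fixture.

import copy

# Stand-in for sentry.seer.similarity.utils.MAX_FRAME_COUNT (assumed value).
MAX_FRAME_COUNT = 50

# Skeletal stand-in for the test suite's EXCEPTION fixture.
EXCEPTION = {
    "values": [
        {
            "type": "ZeroDivisionError",
            "value": "division by zero",
            "stacktrace": {"frames": []},
        }
    ]
}

# Mirror the removed test: copy the fixture, then push it past the frame limit.
exception = copy.deepcopy(EXCEPTION)
exception["values"][0]["stacktrace"]["frames"] += [
    {
        "function": f"divide_by_zero_{i}",
        "module": "__main__",
        "filename": f"python_onboarding_{i}.py",
        "abs_path": f"/Users/user/python_onboarding/python_onboarding_{i}.py",
        "lineno": i,
        "in_app": True,
    }
    for i in range(MAX_FRAME_COUNT + 1)
]

# The payload now exceeds the limit by exactly one frame.
assert len(exception["values"][0]["stacktrace"]["frames"]) == MAX_FRAME_COUNT + 1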