Skip to content

Conversation

@shayna-ch
Copy link
Member

No description provided.

@github-actions github-actions bot added the Scope: Backend Automatically applied to PRs that change backend components label Nov 10, 2025
@semgrep-code-getsentry
Copy link

Semgrep found 1 finding (rule ID: ssc-aecabbe1-e60d-9dc0-a5bd-95001ace2360):

Risk: Affected versions of Django are vulnerable to Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection'). SQL injection in Django's ORM column aliases: when using QuerySet.annotate(), QuerySet.alias(), QuerySet.aggregate(), or QuerySet.extra() with dictionary expansion (**kwargs), the dictionary keys are used unescaped as SQL column aliases. On MySQL and MariaDB backends, an attacker who can influence those keys (for example, by passing a crafted dict of annotations) can inject arbitrary SQL into the generated query.

Manual Review Advice: A vulnerability from this advisory is reachable if you are using Django with MySQL or MariaDB

Fix: Upgrade Django to at least version 5.2.7 (dependency declared at sentry/uv.lock:305).

Reference(s): GHSA-hpr9-3m2g-3j9p, CVE-2025-59681

@codecov
Copy link

codecov bot commented Nov 10, 2025

❌ 12 Tests Failed:

Tests completed: 29566 | Failed: 12 | Passed: 29554 | Skipped: 243
View the top 3 failed test(s) by shortest run time
tests.sentry.event_manager.grouping.test_seer_grouping.SeerEventManagerGroupingTest::test_bypasses_seer_if_group_found
Stack Traces | 3.54s run time
#x1B[1m#x1B[.../event_manager/grouping/test_seer_grouping.py#x1B[0m:132: in test_bypasses_seer_if_group_found
    assert mock_get_seer_similar_issues.call_count == 1  # didn't get called again
#x1B[1m#x1B[31mE   AssertionError: assert 2 == 1#x1B[0m
#x1B[1m#x1B[31mE    +  where 2 = <MagicMock name='get_seer_similar_issues' id='140075606336448'>.call_count#x1B[0m
tests.sentry.event_manager.grouping.test_group_creation_lock::test_group_creation_race[ lock_disabled: False ]
Stack Traces | 3.54s run time
#x1B[1m#x1B[.../event_manager/grouping/test_group_creation_lock.py#x1B[0m:96: in test_group_creation_race
    assert len({group_info.group.id for group_info in return_values}) == 1
#x1B[1m#x1B[31mE   assert 0 == 1#x1B[0m
#x1B[1m#x1B[31mE    +  where 0 = len(set())#x1B[0m

#x1B[33mDuring handling of the above exception, another exception occurred:#x1B[0m
#x1B[1m#x1B[31m.venv/lib/python3.13....../site-packages/_pytest/runner.py#x1B[0m:340: in from_call
    result: Optional[TResult] = func()
#x1B[1m#x1B[31m.venv/lib/python3.13....../site-packages/_pytest/runner.py#x1B[0m:240: in <lambda>
    lambda: runtest_hook(item=item, **kwds), when=when, reraise=reraise
#x1B[1m#x1B[31m.venv/lib/python3.13.../site-packages/pluggy/_hooks.py#x1B[0m:513: in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
#x1B[1m#x1B[31m.venv/lib/python3.13.../site-packages/pluggy/_manager.py#x1B[0m:120: in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
#x1B[1m#x1B[31m.venv/lib/python3.13....../site-packages/_pytest/threadexception.py#x1B[0m:87: in pytest_runtest_call
    yield from thread_exception_runtest_hook()
#x1B[1m#x1B[31m.venv/lib/python3.13....../site-packages/_pytest/threadexception.py#x1B[0m:77: in thread_exception_runtest_hook
    warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))
#x1B[1m#x1B[31mE   pytest.PytestUnhandledThreadExceptionWarning: Exception in thread Thread-44 (save_event)#x1B[0m
#x1B[1m#x1B[31mE   #x1B[0m
#x1B[1m#x1B[31mE   Traceback (most recent call last):#x1B[0m
#x1B[1m#x1B[31mE     File ".../hostedtoolcache/Python/3.13.1....../x64/lib/python3.13/threading.py", line 1041, in _bootstrap_inner#x1B[0m
#x1B[1m#x1B[31mE       self.run()#x1B[0m
#x1B[1m#x1B[31mE       ~~~~~~~~^^#x1B[0m
#x1B[1m#x1B[31mE     File ".../hostedtoolcache/Python/3.13.1....../x64/lib/python3.13/threading.py", line 992, in run#x1B[0m
#x1B[1m#x1B[31mE       self._target(*self._args, **self._kwargs)#x1B[0m
#x1B[1m#x1B[31mE       ~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^#x1B[0m
#x1B[1m#x1B[31mE     File ".../event_manager/grouping/test_group_creation_lock.py", line 29, in save_event#x1B[0m
#x1B[1m#x1B[31mE       group_info = assign_event_to_group(#x1B[0m
#x1B[1m#x1B[31mE           event=event,#x1B[0m
#x1B[1m#x1B[31mE           job={"event_metadata": {}, "release": "dogpark", "event": event, "data": {}},#x1B[0m
#x1B[1m#x1B[31mE           metric_tags={},#x1B[0m
#x1B[1m#x1B[31mE       )#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13....../site-packages/sentry_sdk/tracing_utils.py", line 888, in sync_wrapper#x1B[0m
#x1B[1m#x1B[31mE       return f(*args, **kwargs)#x1B[0m
#x1B[1m#x1B[31mE     File ".../src/sentry/event_manager.py", line 1284, in assign_event_to_group#x1B[0m
#x1B[1m#x1B[31mE       primary = get_hashes_and_grouphashes(job, run_primary_grouping, metric_tags)#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13....../site-packages/sentry_sdk/tracing_utils.py", line 888, in sync_wrapper#x1B[0m
#x1B[1m#x1B[31mE       return f(*args, **kwargs)#x1B[0m
#x1B[1m#x1B[31mE     File ".../src/sentry/event_manager.py", line 1366, in get_hashes_and_grouphashes#x1B[0m
#x1B[1m#x1B[31mE       grouphashes = get_or_create_grouphashes(#x1B[0m
#x1B[1m#x1B[31mE           event, project, variants, hashes, grouping_config["id"]#x1B[0m
#x1B[1m#x1B[31mE       )#x1B[0m
#x1B[1m#x1B[31mE     File ".../grouping/ingest/hashing.py", line 236, in get_or_create_grouphashes#x1B[0m
#x1B[1m#x1B[31mE       grouphash = cache.get(cache_key)#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13.../cache/backends/locmem.py", line 36, in get#x1B[0m
#x1B[1m#x1B[31mE       key = self.make_and_validate_key(key, version=version)#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13.../cache/backends/base.py", line 124, in make_and_validate_key#x1B[0m
#x1B[1m#x1B[31mE       self.validate_key(key)#x1B[0m
#x1B[1m#x1B[31mE       ~~~~~~~~~~~~~~~~~^^^^^#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13.../cache/backends/base.py", line 119, in validate_key#x1B[0m
#x1B[1m#x1B[31mE       warnings.warn(warning, CacheKeyWarning)#x1B[0m
#x1B[1m#x1B[31mE       ~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^#x1B[0m
#x1B[1m#x1B[31mE   django.core.cache.backends.base.CacheKeyWarning: Cache key contains characters that will cause errors if used with memcached: ':2:grouphash:4557091338846210:pound sign'#x1B[0m
tests.sentry.event_manager.grouping.test_assign_to_group::test_existing_group_new_hash_exists[ secondary_hash_exists: False - in_transition: False ]
Stack Traces | 3.72s run time
#x1B[1m#x1B[.../event_manager/grouping/test_assign_to_group.py#x1B[0m:417: in test_existing_group_new_hash_exists
    assert results == {
#x1B[1m#x1B[31mE   AssertionError: assert {'event_assig...y': True, ...} == {'event_assig...y': True, ...}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Omitting 11 identical items, use -vv to show#x1B[0m
#x1B[1m#x1B[31mE     Differing items:#x1B[0m
#x1B[1m#x1B[31mE     {'result_tag_value_for_metrics': 'no_match'} != {'result_tag_value_for_metrics': 'found_primary'}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Full diff:#x1B[0m
#x1B[1m#x1B[31mE       {#x1B[0m
#x1B[1m#x1B[31mE           'event_assigned_to_given_existing_group': True,#x1B[0m
#x1B[1m#x1B[31mE           'hashes_different': None,#x1B[0m
#x1B[1m#x1B[31mE           'new_group_created': False,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_existed_already': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_exists_now': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_hash_calculated': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_hash_found': True,#x1B[0m
#x1B[1m#x1B[31mE     -     'result_tag_value_for_metrics': 'found_primary',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^ --- ---  ^^#x1B[0m
#x1B[1m#x1B[31mE     +     'result_tag_value_for_metrics': 'no_match',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^    ^^^#x1B[0m
#x1B[1m#x1B[31mE           'secondary_grouphash_existed_already': None,#x1B[0m
#x1B[1m#x1B[31mE           'secondary_grouphash_exists_now': None,#x1B[0m
#x1B[1m#x1B[31mE           'secondary_hash_calculated': False,#x1B[0m
#x1B[1m#x1B[31mE           'secondary_hash_found': None,#x1B[0m
#x1B[1m#x1B[31mE       }#x1B[0m
tests.sentry.event_manager.grouping.test_assign_to_group::test_existing_group_new_hash_exists[ secondary_hash_exists: False - in_transition: True ]
Stack Traces | 3.75s run time
#x1B[1m#x1B[.../event_manager/grouping/test_assign_to_group.py#x1B[0m:417: in test_existing_group_new_hash_exists
    assert results == {
#x1B[1m#x1B[31mE   AssertionError: assert {'event_assig...y': True, ...} == {'event_assig...y': True, ...}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Omitting 6 identical items, use -vv to show#x1B[0m
#x1B[1m#x1B[31mE     Differing items:#x1B[0m
#x1B[1m#x1B[31mE     {'hashes_different': True} != {'hashes_different': None}#x1B[0m
#x1B[1m#x1B[31mE     {'result_tag_value_for_metrics': 'no_match'} != {'result_tag_value_for_metrics': 'found_primary'}#x1B[0m
#x1B[1m#x1B[31mE     {'secondary_hash_found': False} != {'secondary_hash_found': None}#x1B[0m
#x1B[1m#x1B[31mE     {'secondary_hash_calculated': True} != {'secondary_hash_calculated': False}#x1B[0m
#x1B[1m#x1B[31mE     {'secondary_grouphash_exists_now': False} != {'secondary_grouphash_exists_now': None}#x1B[0m
#x1B[1m#x1B[31mE     {'secondary_grouphash_existed_already': False} != {'secondary_grouphash_existed_already': None}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Full diff:#x1B[0m
#x1B[1m#x1B[31mE       {#x1B[0m
#x1B[1m#x1B[31mE           'event_assigned_to_given_existing_group': True,#x1B[0m
#x1B[1m#x1B[31mE     -     'hashes_different': None,#x1B[0m
#x1B[1m#x1B[31mE     ?                         ^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'hashes_different': True,#x1B[0m
#x1B[1m#x1B[31mE     ?                         ^^^#x1B[0m
#x1B[1m#x1B[31mE           'new_group_created': False,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_existed_already': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_exists_now': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_hash_calculated': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_hash_found': True,#x1B[0m
#x1B[1m#x1B[31mE     -     'result_tag_value_for_metrics': 'found_primary',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^ --- ---  ^^#x1B[0m
#x1B[1m#x1B[31mE     +     'result_tag_value_for_metrics': 'no_match',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^    ^^^#x1B[0m
#x1B[1m#x1B[31mE     -     'secondary_grouphash_existed_already': None,#x1B[0m
#x1B[1m#x1B[31mE     ?                                            ^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'secondary_grouphash_existed_already': False,#x1B[0m
#x1B[1m#x1B[31mE     ?                                            ^^^^#x1B[0m
#x1B[1m#x1B[31mE     -     'secondary_grouphash_exists_now': None,#x1B[0m
#x1B[1m#x1B[31mE     ?                                       ^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'secondary_grouphash_exists_now': False,#x1B[0m
#x1B[1m#x1B[31mE     ?                                       ^^^^#x1B[0m
#x1B[1m#x1B[31mE     -     'secondary_hash_calculated': False,#x1B[0m
#x1B[1m#x1B[31mE     ?                                  ^^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'secondary_hash_calculated': True,#x1B[0m
#x1B[1m#x1B[31mE     ?                                  ^^^#x1B[0m
#x1B[1m#x1B[31mE     -     'secondary_hash_found': None,#x1B[0m
#x1B[1m#x1B[31mE     ?                             ^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'secondary_hash_found': False,#x1B[0m
#x1B[1m#x1B[31mE     ?                             ^^^^#x1B[0m
#x1B[1m#x1B[31mE       }#x1B[0m
tests.sentry.event_manager.grouping.test_group_creation_lock::test_group_creation_race[ lock_disabled: True ]
Stack Traces | 4.02s run time
#x1B[1m#x1B[.../event_manager/grouping/test_group_creation_lock.py#x1B[0m:100: in test_group_creation_race
    assert 1 < len({group_info.group.id for group_info in return_values}) <= CONCURRENCY
#x1B[1m#x1B[31mE   assert 1 < 0#x1B[0m
#x1B[1m#x1B[31mE    +  where 0 = len(set())#x1B[0m

#x1B[33mDuring handling of the above exception, another exception occurred:#x1B[0m
#x1B[1m#x1B[31m.venv/lib/python3.13....../site-packages/_pytest/runner.py#x1B[0m:340: in from_call
    result: Optional[TResult] = func()
#x1B[1m#x1B[31m.venv/lib/python3.13....../site-packages/_pytest/runner.py#x1B[0m:240: in <lambda>
    lambda: runtest_hook(item=item, **kwds), when=when, reraise=reraise
#x1B[1m#x1B[31m.venv/lib/python3.13.../site-packages/pluggy/_hooks.py#x1B[0m:513: in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
#x1B[1m#x1B[31m.venv/lib/python3.13.../site-packages/pluggy/_manager.py#x1B[0m:120: in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
#x1B[1m#x1B[31m.venv/lib/python3.13....../site-packages/_pytest/threadexception.py#x1B[0m:87: in pytest_runtest_call
    yield from thread_exception_runtest_hook()
#x1B[1m#x1B[31m.venv/lib/python3.13....../site-packages/_pytest/threadexception.py#x1B[0m:77: in thread_exception_runtest_hook
    warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))
#x1B[1m#x1B[31mE   pytest.PytestUnhandledThreadExceptionWarning: Exception in thread Thread-35 (save_event)#x1B[0m
#x1B[1m#x1B[31mE   #x1B[0m
#x1B[1m#x1B[31mE   Traceback (most recent call last):#x1B[0m
#x1B[1m#x1B[31mE     File ".../hostedtoolcache/Python/3.13.1....../x64/lib/python3.13/threading.py", line 1041, in _bootstrap_inner#x1B[0m
#x1B[1m#x1B[31mE       self.run()#x1B[0m
#x1B[1m#x1B[31mE       ~~~~~~~~^^#x1B[0m
#x1B[1m#x1B[31mE     File ".../hostedtoolcache/Python/3.13.1....../x64/lib/python3.13/threading.py", line 992, in run#x1B[0m
#x1B[1m#x1B[31mE       self._target(*self._args, **self._kwargs)#x1B[0m
#x1B[1m#x1B[31mE       ~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^#x1B[0m
#x1B[1m#x1B[31mE     File ".../event_manager/grouping/test_group_creation_lock.py", line 29, in save_event#x1B[0m
#x1B[1m#x1B[31mE       group_info = assign_event_to_group(#x1B[0m
#x1B[1m#x1B[31mE           event=event,#x1B[0m
#x1B[1m#x1B[31mE           job={"event_metadata": {}, "release": "dogpark", "event": event, "data": {}},#x1B[0m
#x1B[1m#x1B[31mE           metric_tags={},#x1B[0m
#x1B[1m#x1B[31mE       )#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13....../site-packages/sentry_sdk/tracing_utils.py", line 888, in sync_wrapper#x1B[0m
#x1B[1m#x1B[31mE       return f(*args, **kwargs)#x1B[0m
#x1B[1m#x1B[31mE     File ".../src/sentry/event_manager.py", line 1284, in assign_event_to_group#x1B[0m
#x1B[1m#x1B[31mE       primary = get_hashes_and_grouphashes(job, run_primary_grouping, metric_tags)#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13....../site-packages/sentry_sdk/tracing_utils.py", line 888, in sync_wrapper#x1B[0m
#x1B[1m#x1B[31mE       return f(*args, **kwargs)#x1B[0m
#x1B[1m#x1B[31mE     File ".../src/sentry/event_manager.py", line 1366, in get_hashes_and_grouphashes#x1B[0m
#x1B[1m#x1B[31mE       grouphashes = get_or_create_grouphashes(#x1B[0m
#x1B[1m#x1B[31mE           event, project, variants, hashes, grouping_config["id"]#x1B[0m
#x1B[1m#x1B[31mE       )#x1B[0m
#x1B[1m#x1B[31mE     File ".../grouping/ingest/hashing.py", line 236, in get_or_create_grouphashes#x1B[0m
#x1B[1m#x1B[31mE       grouphash = cache.get(cache_key)#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13.../cache/backends/locmem.py", line 36, in get#x1B[0m
#x1B[1m#x1B[31mE       key = self.make_and_validate_key(key, version=version)#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13.../cache/backends/base.py", line 124, in make_and_validate_key#x1B[0m
#x1B[1m#x1B[31mE       self.validate_key(key)#x1B[0m
#x1B[1m#x1B[31mE       ~~~~~~~~~~~~~~~~~^^^^^#x1B[0m
#x1B[1m#x1B[31mE     File ".../sentry/sentry/.venv/lib/python3.13.../cache/backends/base.py", line 119, in validate_key#x1B[0m
#x1B[1m#x1B[31mE       warnings.warn(warning, CacheKeyWarning)#x1B[0m
#x1B[1m#x1B[31mE       ~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^#x1B[0m
#x1B[1m#x1B[31mE   django.core.cache.backends.base.CacheKeyWarning: Cache key contains characters that will cause errors if used with memcached: ':2:grouphash:4557091344875522:pound sign'#x1B[0m
tests.sentry.notifications.test_notifications.ActivityNotificationTest::test_sends_regression_notification
Stack Traces | 4.12s run time
#x1B[1m#x1B[.../sentry/notifications/test_notifications.py#x1B[0m:399: in test_sends_regression_notification
    event2 = manager.save(self.project.id)
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:503: in save
    return self.save_error_events(
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:537: in save_error_events
    group_info = assign_event_to_group(event=job["event"], job=job, metric_tags=metric_tags)
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1312: in assign_event_to_group
    group_info = create_group_with_grouphashes(job, all_grouphashes)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1498: in create_group_with_grouphashes
    return handle_existing_grouphash(job, existing_grouphash, grouphashes)
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1423: in handle_existing_grouphash
    is_regression = _process_existing_aggregate(
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1879: in _process_existing_aggregate
    is_regression = _handle_regression(group, event, release)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1792: in _handle_regression
    activity = Activity.objects.create_group_activity(
#x1B[1m#x1B[.../sentry/models/activity.py#x1B[0m:108: in create_group_activity
    activity.send_notification()
#x1B[1m#x1B[.../sentry/models/activity.py#x1B[0m:220: in send_notification
    activity.send_activity_notifications.delay(self.id)
#x1B[1m#x1B[.../sentry/silo/base.py#x1B[0m:159: in override
    return original_method(*args, **kwargs)
#x1B[1m#x1B[.../sentry/taskworker/task.py#x1B[0m:98: in delay
    self.apply_async(args=args, kwargs=kwargs)
#x1B[1m#x1B[.../sentry/silo/base.py#x1B[0m:159: in override
    return original_method(*args, **kwargs)
#x1B[1m#x1B[.../sentry/taskworker/task.py#x1B[0m:128: in apply_async
    self._func(*args, **kwargs)
#x1B[1m#x1B[.../sentry/tasks/activity.py#x1B[0m:45: in send_activity_notifications
    notifier.notify_about_activity(activity)
#x1B[1m#x1B[.../sentry/mail/adapter.py#x1B[0m:172: in notify_about_activity
    email_cls(activity).send()
#x1B[1m#x1B[.../notifications/activity/base.py#x1B[0m:72: in send
    return send_activity_notification(self)
#x1B[1m#x1B[.../notifications/utils/__init__.py#x1B[0m:359: in send_activity_notification
    participants_by_provider = notification.get_participants_with_group_subscription_reason()
#x1B[1m#x1B[.../notifications/activity/base.py#x1B[0m:109: in get_participants_with_group_subscription_reason
    return get_participants_for_group(self.group, self.activity.user_id)
#x1B[1m#x1B[.../notifications/utils/participants.py#x1B[0m:135: in get_participants_for_group
    participants_by_provider: ParticipantMap = GroupSubscription.objects.get_participants(group)
#x1B[1m#x1B[.../sentry/models/groupsubscription.py#x1B[0m:145: in get_participants
    all_possible_actors = Actor.many_from_object(group.project.get_members_as_rpc_users())
#x1B[1m#x1B[.../sentry/models/project.py#x1B[0m:488: in get_members_as_rpc_users
    return user_service.get_many_by_id(ids=list(member_ids))
#x1B[1m#x1B[.../services/user/service.py#x1B[0m:120: in get_many_by_id
    return get_many_by_id(ids)
#x1B[1m#x1B[.../rpc/caching/service.py#x1B[0m:228: in __call__
    return self.get_many(ids)
#x1B[1m#x1B[.../rpc/caching/service.py#x1B[0m:267: in get_many
    cb_result = self.cb(missing_keys)
#x1B[1m#x1B[.../services/user/service.py#x1B[0m:328: in get_many_by_id
    return user_service.get_many(filter={"user_ids": ids})
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:354: in remote_method
    return dispatch_remote_call(
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:476: in dispatch_remote_call
    return remote_silo_call.dispatch(use_test_client)
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:511: in dispatch
    serial_response = self._send_to_remote_silo(use_test_client)
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:572: in _send_to_remote_silo
    response = self._fire_test_request(headers, data)
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:631: in _fire_test_request
    in_test_assert_no_transaction(
#x1B[1m#x1B[.../db/postgres/transactions.py#x1B[0m:102: in in_test_assert_no_transaction
    assert not hybrid_cloud.simulated_transaction_watermarks.connection_transaction_depth_above_watermark(
#x1B[1m#x1B[31mE   AssertionError: remote service method to .../rpc/user/get_many/ called inside transaction!  Move service calls to outside of transactions.#x1B[0m
tests.sentry.event_manager.grouping.test_assign_to_group::test_existing_group_no_new_hash[ in_transition: True ]
Stack Traces | 4.16s run time
#x1B[1m#x1B[.../event_manager/grouping/test_assign_to_group.py#x1B[0m:333: in test_existing_group_no_new_hash
    assert results == {
#x1B[1m#x1B[31mE   AssertionError: assert {'event_assig...': False, ...} == {'event_assig...': False, ...}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Omitting 11 identical items, use -vv to show#x1B[0m
#x1B[1m#x1B[31mE     Differing items:#x1B[0m
#x1B[1m#x1B[31mE     {'result_tag_value_for_metrics': 'no_match'} != {'result_tag_value_for_metrics': 'found_secondary'}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Full diff:#x1B[0m
#x1B[1m#x1B[31mE       {#x1B[0m
#x1B[1m#x1B[31mE           'event_assigned_to_given_existing_group': True,#x1B[0m
#x1B[1m#x1B[31mE           'hashes_different': True,#x1B[0m
#x1B[1m#x1B[31mE           'new_group_created': False,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_existed_already': False,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_exists_now': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_hash_calculated': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_hash_found': False,#x1B[0m
#x1B[1m#x1B[31mE     -     'result_tag_value_for_metrics': 'found_secondary',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^ --- ^^ ^^^^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'result_tag_value_for_metrics': 'no_match',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^  ^^^ ^#x1B[0m
#x1B[1m#x1B[31mE           'secondary_grouphash_existed_already': True,#x1B[0m
#x1B[1m#x1B[31mE           'secondary_grouphash_exists_now': True,#x1B[0m
#x1B[1m#x1B[31mE           'secondary_hash_calculated': True,#x1B[0m
#x1B[1m#x1B[31mE           'secondary_hash_found': True,#x1B[0m
#x1B[1m#x1B[31mE       }#x1B[0m
tests.sentry.event_manager.test_event_manager.EventManagerTest::test_unresolves_group_with_auto_resolve
Stack Traces | 4.17s run time
#x1B[1m#x1B[.../sentry/event_manager/test_event_manager.py#x1B[0m:1172: in test_unresolves_group_with_auto_resolve
    event2 = manager.save(self.project.id)
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:503: in save
    return self.save_error_events(
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:537: in save_error_events
    group_info = assign_event_to_group(event=job["event"], job=job, metric_tags=metric_tags)
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1312: in assign_event_to_group
    group_info = create_group_with_grouphashes(job, all_grouphashes)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1498: in create_group_with_grouphashes
    return handle_existing_grouphash(job, existing_grouphash, grouphashes)
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1423: in handle_existing_grouphash
    is_regression = _process_existing_aggregate(
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1879: in _process_existing_aggregate
    is_regression = _handle_regression(group, event, release)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1792: in _handle_regression
    activity = Activity.objects.create_group_activity(
#x1B[1m#x1B[.../sentry/models/activity.py#x1B[0m:108: in create_group_activity
    activity.send_notification()
#x1B[1m#x1B[.../sentry/models/activity.py#x1B[0m:220: in send_notification
    activity.send_activity_notifications.delay(self.id)
#x1B[1m#x1B[.../sentry/silo/base.py#x1B[0m:159: in override
    return original_method(*args, **kwargs)
#x1B[1m#x1B[.../sentry/taskworker/task.py#x1B[0m:98: in delay
    self.apply_async(args=args, kwargs=kwargs)
#x1B[1m#x1B[.../sentry/silo/base.py#x1B[0m:159: in override
    return original_method(*args, **kwargs)
#x1B[1m#x1B[.../sentry/taskworker/task.py#x1B[0m:128: in apply_async
    self._func(*args, **kwargs)
#x1B[1m#x1B[.../sentry/tasks/activity.py#x1B[0m:45: in send_activity_notifications
    notifier.notify_about_activity(activity)
#x1B[1m#x1B[.../sentry/mail/adapter.py#x1B[0m:172: in notify_about_activity
    email_cls(activity).send()
#x1B[1m#x1B[.../notifications/activity/base.py#x1B[0m:72: in send
    return send_activity_notification(self)
#x1B[1m#x1B[.../notifications/utils/__init__.py#x1B[0m:359: in send_activity_notification
    participants_by_provider = notification.get_participants_with_group_subscription_reason()
#x1B[1m#x1B[.../notifications/activity/base.py#x1B[0m:109: in get_participants_with_group_subscription_reason
    return get_participants_for_group(self.group, self.activity.user_id)
#x1B[1m#x1B[.../notifications/utils/participants.py#x1B[0m:135: in get_participants_for_group
    participants_by_provider: ParticipantMap = GroupSubscription.objects.get_participants(group)
#x1B[1m#x1B[.../sentry/models/groupsubscription.py#x1B[0m:145: in get_participants
    all_possible_actors = Actor.many_from_object(group.project.get_members_as_rpc_users())
#x1B[1m#x1B[.../sentry/models/project.py#x1B[0m:488: in get_members_as_rpc_users
    return user_service.get_many_by_id(ids=list(member_ids))
#x1B[1m#x1B[.../services/user/service.py#x1B[0m:120: in get_many_by_id
    return get_many_by_id(ids)
#x1B[1m#x1B[.../rpc/caching/service.py#x1B[0m:228: in __call__
    return self.get_many(ids)
#x1B[1m#x1B[.../rpc/caching/service.py#x1B[0m:267: in get_many
    cb_result = self.cb(missing_keys)
#x1B[1m#x1B[.../services/user/service.py#x1B[0m:328: in get_many_by_id
    return user_service.get_many(filter={"user_ids": ids})
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:354: in remote_method
    return dispatch_remote_call(
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:476: in dispatch_remote_call
    return remote_silo_call.dispatch(use_test_client)
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:511: in dispatch
    serial_response = self._send_to_remote_silo(use_test_client)
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:572: in _send_to_remote_silo
    response = self._fire_test_request(headers, data)
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:631: in _fire_test_request
    in_test_assert_no_transaction(
#x1B[1m#x1B[.../db/postgres/transactions.py#x1B[0m:102: in in_test_assert_no_transaction
    assert not hybrid_cloud.simulated_transaction_watermarks.connection_transaction_depth_above_watermark(
#x1B[1m#x1B[31mE   AssertionError: remote service method to .../rpc/user/get_many/ called inside transaction!  Move service calls to outside of transactions.#x1B[0m
tests.sentry.event_manager.grouping.test_assign_to_group::test_existing_group_new_hash_exists[ secondary_hash_exists: True - in_transition: False ]
Stack Traces | 4.73s run time
#x1B[1m#x1B[.../event_manager/grouping/test_assign_to_group.py#x1B[0m:417: in test_existing_group_new_hash_exists
    assert results == {
#x1B[1m#x1B[31mE   AssertionError: assert {'event_assig...y': True, ...} == {'event_assig...y': True, ...}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Omitting 11 identical items, use -vv to show#x1B[0m
#x1B[1m#x1B[31mE     Differing items:#x1B[0m
#x1B[1m#x1B[31mE     {'result_tag_value_for_metrics': 'no_match'} != {'result_tag_value_for_metrics': 'found_primary'}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Full diff:#x1B[0m
#x1B[1m#x1B[31mE       {#x1B[0m
#x1B[1m#x1B[31mE           'event_assigned_to_given_existing_group': True,#x1B[0m
#x1B[1m#x1B[31mE           'hashes_different': None,#x1B[0m
#x1B[1m#x1B[31mE           'new_group_created': False,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_existed_already': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_exists_now': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_hash_calculated': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_hash_found': True,#x1B[0m
#x1B[1m#x1B[31mE     -     'result_tag_value_for_metrics': 'found_primary',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^ --- ---  ^^#x1B[0m
#x1B[1m#x1B[31mE     +     'result_tag_value_for_metrics': 'no_match',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^    ^^^#x1B[0m
#x1B[1m#x1B[31mE           'secondary_grouphash_existed_already': None,#x1B[0m
#x1B[1m#x1B[31mE           'secondary_grouphash_exists_now': None,#x1B[0m
#x1B[1m#x1B[31mE           'secondary_hash_calculated': False,#x1B[0m
#x1B[1m#x1B[31mE           'secondary_hash_found': None,#x1B[0m
#x1B[1m#x1B[31mE       }#x1B[0m
tests.sentry.event_manager.grouping.test_assign_to_group::test_existing_group_new_hash_exists[ secondary_hash_exists: True - in_transition: True ]
Stack Traces | 4.97s run time
#x1B[1m#x1B[.../event_manager/grouping/test_assign_to_group.py#x1B[0m:417: in test_existing_group_new_hash_exists
    assert results == {
#x1B[1m#x1B[31mE   AssertionError: assert {'event_assig...y': True, ...} == {'event_assig...y': True, ...}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Omitting 5 identical items, use -vv to show#x1B[0m
#x1B[1m#x1B[31mE     Differing items:#x1B[0m
#x1B[1m#x1B[31mE     {'hashes_different': True} != {'hashes_different': None}#x1B[0m
#x1B[1m#x1B[31mE     {'secondary_hash_calculated': True} != {'secondary_hash_calculated': False}#x1B[0m
#x1B[1m#x1B[31mE     {'result_tag_value_for_metrics': 'no_match'} != {'result_tag_value_for_metrics': 'found_primary'}#x1B[0m
#x1B[1m#x1B[31mE     {'secondary_grouphash_existed_already': True} != {'secondary_grouphash_existed_already': None}#x1B[0m
#x1B[1m#x1B[31mE     {'secondary_hash_found': True} != {'secondary_hash_found': None}#x1B[0m
#x1B[1m#x1B[31mE     {'primary_hash_found': False} != {'primary_hash_found': True}#x1B[0m
#x1B[1m#x1B[31mE     {'secondary_grouphash_exists_now': True} != {'secondary_grouphash_exists_now': None}#x1B[0m
#x1B[1m#x1B[31mE     #x1B[0m
#x1B[1m#x1B[31mE     Full diff:#x1B[0m
#x1B[1m#x1B[31mE       {#x1B[0m
#x1B[1m#x1B[31mE           'event_assigned_to_given_existing_group': True,#x1B[0m
#x1B[1m#x1B[31mE     -     'hashes_different': None,#x1B[0m
#x1B[1m#x1B[31mE     ?                         ^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'hashes_different': True,#x1B[0m
#x1B[1m#x1B[31mE     ?                         ^^^#x1B[0m
#x1B[1m#x1B[31mE           'new_group_created': False,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_existed_already': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_grouphash_exists_now': True,#x1B[0m
#x1B[1m#x1B[31mE           'primary_hash_calculated': True,#x1B[0m
#x1B[1m#x1B[31mE     -     'primary_hash_found': True,#x1B[0m
#x1B[1m#x1B[31mE     ?                           ^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'primary_hash_found': False,#x1B[0m
#x1B[1m#x1B[31mE     ?                           ^^^^#x1B[0m
#x1B[1m#x1B[31mE     -     'result_tag_value_for_metrics': 'found_primary',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^ --- ---  ^^#x1B[0m
#x1B[1m#x1B[31mE     +     'result_tag_value_for_metrics': 'no_match',#x1B[0m
#x1B[1m#x1B[31mE     ?                                      ^    ^^^#x1B[0m
#x1B[1m#x1B[31mE     -     'secondary_grouphash_existed_already': None,#x1B[0m
#x1B[1m#x1B[31mE     ?                                            ^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'secondary_grouphash_existed_already': True,#x1B[0m
#x1B[1m#x1B[31mE     ?                                            ^^^#x1B[0m
#x1B[1m#x1B[31mE     -     'secondary_grouphash_exists_now': None,#x1B[0m
#x1B[1m#x1B[31mE     ?                                       ^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'secondary_grouphash_exists_now': True,#x1B[0m
#x1B[1m#x1B[31mE     ?                                       ^^^#x1B[0m
#x1B[1m#x1B[31mE     -     'secondary_hash_calculated': False,#x1B[0m
#x1B[1m#x1B[31mE     ?                                  ^^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'secondary_hash_calculated': True,#x1B[0m
#x1B[1m#x1B[31mE     ?                                  ^^^#x1B[0m
#x1B[1m#x1B[31mE     -     'secondary_hash_found': None,#x1B[0m
#x1B[1m#x1B[31mE     ?                             ^^^#x1B[0m
#x1B[1m#x1B[31mE     +     'secondary_hash_found': True,#x1B[0m
#x1B[1m#x1B[31mE     ?                             ^^^#x1B[0m
#x1B[1m#x1B[31mE       }#x1B[0m
tests.sentry.event_manager.test_event_manager.EventManagerTest::test_marks_as_unresolved_with_new_release_with_integration
Stack Traces | 5.51s run time
#x1B[1m#x1B[.../sentry/tasks/base.py#x1B[0m:142: in wrapped
    return func(*args, **kwargs)
#x1B[1m#x1B[.../sentry/tasks/base.py#x1B[0m:190: in wrapper
    response = function(*args, **kwargs)
#x1B[1m#x1B[.../integrations/tasks/sync_status_outbound.py#x1B[0m:48: in sync_status_outbound
    integration = integration_service.get_integration(
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:354: in remote_method
    return dispatch_remote_call(
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:476: in dispatch_remote_call
    return remote_silo_call.dispatch(use_test_client)
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:511: in dispatch
    serial_response = self._send_to_remote_silo(use_test_client)
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:572: in _send_to_remote_silo
    response = self._fire_test_request(headers, data)
#x1B[1m#x1B[.../hybridcloud/rpc/service.py#x1B[0m:631: in _fire_test_request
    in_test_assert_no_transaction(
#x1B[1m#x1B[.../db/postgres/transactions.py#x1B[0m:102: in in_test_assert_no_transaction
    assert not hybrid_cloud.simulated_transaction_watermarks.connection_transaction_depth_above_watermark(
#x1B[1m#x1B[31mE   AssertionError: remote service method to .../rpc/integration/get_integration/ called inside transaction!  Move service calls to outside of transactions.#x1B[0m

#x1B[33mDuring handling of the above exception, another exception occurred:#x1B[0m
#x1B[1m#x1B[.../sentry/event_manager/test_event_manager.py#x1B[0m:1046: in test_marks_as_unresolved_with_new_release_with_integration
    event = manager.save(self.project.id)
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:503: in save
    return self.save_error_events(
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:537: in save_error_events
    group_info = assign_event_to_group(event=job["event"], job=job, metric_tags=metric_tags)
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1312: in assign_event_to_group
    group_info = create_group_with_grouphashes(job, all_grouphashes)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1498: in create_group_with_grouphashes
    return handle_existing_grouphash(job, existing_grouphash, grouphashes)
#x1B[1m#x1B[31m.venv/lib/python3.13............/site-packages/sentry_sdk/tracing_utils.py#x1B[0m:904: in sync_wrapper
    result = f(*args, **kwargs)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1423: in handle_existing_grouphash
    is_regression = _process_existing_aggregate(
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1879: in _process_existing_aggregate
    is_regression = _handle_regression(group, event, release)
#x1B[1m#x1B[31msrc/sentry/event_manager.py#x1B[0m:1799: in _handle_regression
    kick_off_status_syncs.apply_async(
#x1B[1m#x1B[.../sentry/silo/base.py#x1B[0m:159: in override
    return original_method(*args, **kwargs)
#x1B[1m#x1B[.../sentry/taskworker/task.py#x1B[0m:128: in apply_async
    self._func(*args, **kwargs)
#x1B[1m#x1B[.../sentry/tasks/base.py#x1B[0m:142: in wrapped
    return func(*args, **kwargs)
#x1B[1m#x1B[.../sentry/tasks/base.py#x1B[0m:190: in wrapper
    response = function(*args, **kwargs)
#x1B[1m#x1B[.../integrations/tasks/kick_off_status_syncs.py#x1B[0m:25: in kick_off_status_syncs
    sync_status_outbound.apply_async(
#x1B[1m#x1B[.../sentry/silo/base.py#x1B[0m:159: in override
    return original_method(*args, **kwargs)
#x1B[1m#x1B[.../sentry/taskworker/task.py#x1B[0m:128: in apply_async
    self._func(*args, **kwargs)
#x1B[1m#x1B[.../sentry/tasks/base.py#x1B[0m:178: in wrapped
    retry_task(exc, raise_on_no_retries=raise_on_no_retries)
#x1B[1m#x1B[.../sentry/taskworker/retry.py#x1B[0m:56: in retry_task
    raise RetryError()
#x1B[1m#x1B[31mE   sentry.taskworker.retry.RetryError#x1B[0m
tests.snuba.api.endpoints.test_project_group_index.GroupDeleteTest::test_new_event_for_pending_deletion_group_creates_new_group
Stack Traces | 7.57s run time
#x1B[1m#x1B[31m.venv/lib/python3.13.../db/backends/utils.py#x1B[0m:103: in _execute
    return self.cursor.execute(sql)
#x1B[1m#x1B[.../db/postgres/decorators.py#x1B[0m:16: in inner
    return func(self, *args, **kwargs)
#x1B[1m#x1B[.../db/postgres/base.py#x1B[0m:96: in execute
    return self.cursor.execute(sql)
#x1B[1m#x1B[31mE   psycopg2.errors.ForeignKeyViolation: update or delete on table "sentry_grouphash" violates foreign key constraint "sentry_grouphashmeta_grouphash_id_c47122d9_fk_sentry_gr" on table "sentry_grouphashmetadata"#x1B[0m
#x1B[1m#x1B[31mE   DETAIL:  Key (id)=(293) is still referenced from table "sentry_grouphashmetadata".#x1B[0m

#x1B[33mThe above exception was the direct cause of the following exception:#x1B[0m
#x1B[1m#x1B[.../sentry/testutils/cases.py#x1B[0m:396: in _post_teardown
    super()._post_teardown()
#x1B[1m#x1B[31m.venv/lib/python3.13.../backends/postgresql/base.py#x1B[0m:482: in check_constraints
    cursor.execute("SET CONSTRAINTS ALL IMMEDIATE")
#x1B[1m#x1B[31m.venv/lib/python3.13.../db/backends/utils.py#x1B[0m:122: in execute
    return super().execute(sql, params)
#x1B[1m#x1B[31m.venv/lib/python3.13.../site-packages/sentry_sdk/utils.py#x1B[0m:1849: in runner
    return original_function(*args, **kwargs)
#x1B[1m#x1B[31m.venv/lib/python3.13.../db/backends/utils.py#x1B[0m:79: in execute
    return self._execute_with_wrappers(
#x1B[1m#x1B[31m.venv/lib/python3.13.../db/backends/utils.py#x1B[0m:92: in _execute_with_wrappers
    return executor(sql, params, many, context)
#x1B[1m#x1B[.../db/postgres/base.py#x1B[0m:70: in _execute__include_sql_in_error
    return execute(sql, params, many, context)
#x1B[1m#x1B[.../db/postgres/base.py#x1B[0m:58: in _execute__clean_params
    return execute(sql, clean_bad_params(params), many, context)
#x1B[1m#x1B[.../sentry/testutils/hybrid_cloud.py#x1B[0m:133: in __call__
    return execute(*params)
#x1B[1m#x1B[31m.venv/lib/python3.13.../db/backends/utils.py#x1B[0m:100: in _execute
    with self.db.wrap_database_errors:
#x1B[1m#x1B[31m.venv/lib/python3.13.../django/db/utils.py#x1B[0m:91: in __exit__
    raise dj_exc_value.with_traceback(traceback) from exc_value
#x1B[1m#x1B[31m.venv/lib/python3.13.../db/backends/utils.py#x1B[0m:103: in _execute
    return self.cursor.execute(sql)
#x1B[1m#x1B[.../db/postgres/decorators.py#x1B[0m:16: in inner
    return func(self, *args, **kwargs)
#x1B[1m#x1B[.../db/postgres/base.py#x1B[0m:96: in execute
    return self.cursor.execute(sql)
#x1B[1m#x1B[31mE   django.db.utils.IntegrityError: update or delete on table "sentry_grouphash" violates foreign key constraint "sentry_grouphashmeta_grouphash_id_c47122d9_fk_sentry_gr" on table "sentry_grouphashmetadata"#x1B[0m
#x1B[1m#x1B[31mE   DETAIL:  Key (id)=(293) is still referenced from table "sentry_grouphashmetadata".#x1B[0m
#x1B[1m#x1B[31mE   #x1B[0m
#x1B[1m#x1B[31mE   SQL: SET CONSTRAINTS ALL IMMEDIATE#x1B[0m

To view more test analytics, go to the Test Analytics Dashboard
📋 Got 3 mins? Take this short survey to help us improve Test Analytics.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment

Labels

Scope: Backend Automatically applied to PRs that change backend components

Projects

None yet

Development

Successfully merging this pull request may close these issues.

2 participants