chore(grouping): Remove all uses of non-optimized-grouping-related code #78260

Merged
27 changes: 6 additions & 21 deletions src/sentry/event_manager.py
@@ -53,11 +53,7 @@
GroupingConfig,
get_grouping_config_dict_for_project,
)
from sentry.grouping.ingest.config import (
is_in_transition,
project_uses_optimized_grouping,
update_grouping_config_if_needed,
)
from sentry.grouping.ingest.config import is_in_transition, update_grouping_config_if_needed
from sentry.grouping.ingest.hashing import (
find_existing_grouphash,
get_hash_values,
@@ -517,12 +513,10 @@ def save(
return jobs[0]["event"]
else:
project = job["event"].project
job["optimized_grouping"] = project_uses_optimized_grouping(project)
job["in_grouping_transition"] = is_in_transition(project)
metric_tags = {
"platform": job["event"].platform or "unknown",
"sdk": normalized_sdk_tag_from_event(job["event"].data),
"using_transition_optimization": job["optimized_grouping"],
"in_transition": job["in_grouping_transition"],
}
# This metric allows differentiating from all calls to the `event_manager.save` metric
@@ -1323,20 +1317,11 @@ def get_culprit(data: Mapping[str, Any]) -> str:

@sentry_sdk.tracing.trace
def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> GroupInfo | None:
if job["optimized_grouping"]:
group_info = _save_aggregate_new(
event=event,
job=job,
metric_tags=metric_tags,
)
else:
group_info = _save_aggregate(
event=event,
job=job,
release=job["release"],
received_timestamp=job["received_timestamp"],
metric_tags=metric_tags,
)
group_info = _save_aggregate_new(
event=event,
job=job,
metric_tags=metric_tags,
)

if group_info:
event.group = group_info.group
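For readability, here is a sketch of how `assign_event_to_group` reads after this change, assembled from the hunk above. Everything following the `if group_info:` block is cut off by the hunk and is therefore omitted (marked with `...`); this is a sketch for orientation, not a verbatim copy of the full function.

# Sketch only: the `job["optimized_grouping"]` branch is gone, so every event
# now flows through `_save_aggregate_new`.
@sentry_sdk.tracing.trace
def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> GroupInfo | None:
    group_info = _save_aggregate_new(
        event=event,
        job=job,
        metric_tags=metric_tags,
    )

    if group_info:
        event.group = group_info.group
    ...  # remainder of the function elided by the hunk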
3 changes: 1 addition & 2 deletions src/sentry/grouping/ingest/metrics.py
@@ -6,7 +6,7 @@

from sentry import options
from sentry.grouping.api import GroupingConfig
from sentry.grouping.ingest.config import is_in_transition, project_uses_optimized_grouping
from sentry.grouping.ingest.config import is_in_transition
from sentry.models.project import Project
from sentry.utils import metrics
from sentry.utils.tag_normalization import normalized_sdk_tag_from_event
@@ -65,7 +65,6 @@ def record_calculation_metric_with_result(
# count to get an average number of calculations per event
tags = {
"in_transition": str(is_in_transition(project)),
"using_transition_optimization": str(project_uses_optimized_grouping(project)),
"result": result,
}
metrics.incr(
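With `using_transition_optimization` dropped, the tag dictionary built in `record_calculation_metric_with_result` keeps only the `in_transition` and `result` entries. A minimal sketch follows; the function's full signature and the arguments to `metrics.incr(...)` are cut off by the hunk, so the wrapper name, the parameter list, and the metric name below are placeholders for illustration, not the real ones.

from sentry.grouping.ingest.config import is_in_transition
from sentry.models.project import Project
from sentry.utils import metrics


def _record_result_metric_sketch(project: Project, result: str) -> None:
    # Hypothetical wrapper: the real code lives in record_calculation_metric_with_result().
    tags = {
        "in_transition": str(is_in_transition(project)),
        # "using_transition_optimization" was removed along with the
        # project_uses_optimized_grouping import.
        "result": result,
    }
    metrics.incr("grouping.calculation_result", tags=tags)  # placeholder metric name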
138 changes: 17 additions & 121 deletions tests/sentry/event_manager/grouping/test_assign_to_group.py
@@ -1,15 +1,13 @@
from __future__ import annotations

from contextlib import contextmanager
from random import randint
from time import time
from typing import Any
from unittest import mock
from unittest.mock import MagicMock, patch

import pytest

from sentry.event_manager import _create_group, _save_aggregate, _save_aggregate_new
from sentry.event_manager import _create_group
from sentry.eventstore.models import Event
from sentry.grouping.ingest.hashing import (
_calculate_primary_hashes,
@@ -20,10 +18,7 @@
from sentry.models.grouphash import GroupHash
from sentry.models.project import Project
from sentry.projectoptions.defaults import DEFAULT_GROUPING_CONFIG, LEGACY_GROUPING_CONFIG
from sentry.testutils.factories import Factories
from sentry.testutils.helpers.eventprocessing import save_new_event
from sentry.testutils.helpers.features import Feature
from sentry.testutils.helpers.options import override_options
from sentry.testutils.pytest.fixtures import django_db_all
from sentry.testutils.pytest.mocking import capture_results
from sentry.testutils.skips import requires_snuba
@@ -130,7 +125,6 @@ def get_results_from_saving_event(
secondary_config: str,
in_transition: bool,
existing_group_id: int | None = None,
new_logic_enabled: bool = False,
):
# Whether or not these are assigned a value depends on the values of `in_transition` and
# `existing_group_id`. Everything else we'll return will definitely get a value and therefore
@@ -148,12 +142,7 @@

return_values: dict[str, list[Any]] = {}

with (
patch_grouping_helpers(return_values) as spies,
mock.patch(
"sentry.event_manager.project_uses_optimized_grouping", return_value=new_logic_enabled
),
):
with patch_grouping_helpers(return_values) as spies:
calculate_secondary_hash_spy = spies["_calculate_secondary_hashes"]
create_group_spy = spies["_create_group"]
calculate_primary_hash_spy = spies["_calculate_primary_hashes"]
@@ -250,13 +239,7 @@ def get_results_from_saving_event(
@pytest.mark.parametrize(
"in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ")
)
@pytest.mark.parametrize(
"new_logic_enabled",
(True, False),
ids=(" new_logic_enabled: True ", " new_logic_enabled: False "),
)
def test_new_group(
new_logic_enabled: bool,
in_transition: bool,
default_project: Project,
):
@@ -269,7 +252,6 @@ def test_new_group(
primary_config=DEFAULT_GROUPING_CONFIG,
secondary_config=LEGACY_GROUPING_CONFIG,
in_transition=in_transition,
new_logic_enabled=new_logic_enabled,
)

if in_transition:
@@ -311,13 +293,7 @@ def test_new_group(
@pytest.mark.parametrize(
"in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ")
)
@pytest.mark.parametrize(
"new_logic_enabled",
(True, False),
ids=(" new_logic_enabled: True ", " new_logic_enabled: False "),
)
def test_existing_group_no_new_hash(
new_logic_enabled: bool,
in_transition: bool,
default_project: Project,
):
@@ -335,7 +311,6 @@ def test_existing_group_no_new_hash(
secondary_config=LEGACY_GROUPING_CONFIG,
in_transition=in_transition,
existing_group_id=existing_event.group_id,
new_logic_enabled=new_logic_enabled,
)

if in_transition:
@@ -375,19 +350,13 @@
@pytest.mark.parametrize(
"in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ")
)
@pytest.mark.parametrize(
"new_logic_enabled",
(True, False),
ids=(" new_logic_enabled: True ", " new_logic_enabled: False "),
)
@pytest.mark.parametrize(
"secondary_hash_exists",
(True, False),
ids=(" secondary_hash_exists: True ", " secondary_hash_exists: False "),
)
def test_existing_group_new_hash_exists(
secondary_hash_exists: bool,
new_logic_enabled: bool,
in_transition: bool,
default_project: Project,
):
@@ -426,93 +395,20 @@ def test_existing_group_new_hash_exists(
secondary_config=LEGACY_GROUPING_CONFIG,
in_transition=in_transition,
existing_group_id=existing_event.group_id,
new_logic_enabled=new_logic_enabled,
)

if in_transition and not new_logic_enabled:
assert results == {
"primary_hash_calculated": True,
"secondary_hash_calculated": True,
"hashes_different": True,
"primary_hash_found": True,
"secondary_hash_found": False, # We found the new hash first and quit looking
"new_group_created": False,
"event_assigned_to_given_existing_group": True,
"primary_grouphash_existed_already": True,
"secondary_grouphash_existed_already": secondary_hash_exists,
"primary_grouphash_exists_now": True,
"secondary_grouphash_exists_now": True,
"result_tag_value_for_metrics": "found_primary",
}
# Equivalent to `elif (in_transition and new_logic_enabled) or not in_transition`. In other
# words, with the new logic, if the new hash exists, it doesn't matter whether we're in
# transition or not - no extra calculations are performed.
else:
assert results == {
"primary_hash_calculated": True,
"secondary_hash_calculated": False,
"primary_hash_found": True,
"new_group_created": False,
"event_assigned_to_given_existing_group": True,
"primary_grouphash_existed_already": True,
"primary_grouphash_exists_now": True,
"result_tag_value_for_metrics": "found_primary",
# The rest are moot since no secondary hash was calculated.
"hashes_different": None,
"secondary_hash_found": None,
"secondary_grouphash_existed_already": None,
"secondary_grouphash_exists_now": None,
}


@django_db_all
@pytest.mark.parametrize(
"killswitch_enabled",
(True, False),
ids=(" killswitch_enabled: True ", " killswitch_enabled: False "),
)
@pytest.mark.parametrize("flag_on", (True, False), ids=(" flag_on: True ", " flag_on: False "))
@pytest.mark.parametrize(
"in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ")
)
@pytest.mark.parametrize("id_qualifies", (True,), ids=(" id_qualifies: True ",))
@patch("sentry.event_manager._save_aggregate_new", wraps=_save_aggregate_new)
@patch("sentry.event_manager._save_aggregate", wraps=_save_aggregate)
def test_uses_regular_or_optimized_grouping_as_appropriate(
mock_save_aggregate: MagicMock,
mock_save_aggregate_new: MagicMock,
id_qualifies: bool,
in_transition: bool,
flag_on: bool,
killswitch_enabled: bool,
):
# The snowflake id seems to sometimes get stuck generating nothing but even numbers, so replace
# it with something we know will generate both odds and evens
with patch(
"sentry.utils.snowflake.generate_snowflake_id", side_effect=lambda _: randint(1, 1000)
):
# Keep making projects until we get an id which matches `id_qualifies`
org = Factories.create_organization()
project = Factories.create_project(organization=org)
while (project.id % 5 >= 5) if id_qualifies else (project.id % 5 < 5):
project = Factories.create_project(organization=org)

with (
Feature({"organizations:grouping-suppress-unnecessary-secondary-hash": flag_on}),
patch("sentry.grouping.ingest.config.is_in_transition", return_value=in_transition),
override_options({"grouping.config_transition.killswitch_enabled": killswitch_enabled}),
):
save_event_with_grouping_config(
{"message": "Dogs are great!"}, project, DEFAULT_GROUPING_CONFIG
)

if killswitch_enabled:
assert mock_save_aggregate.call_count == 1
elif flag_on:
assert mock_save_aggregate_new.call_count == 1
elif in_transition:
assert mock_save_aggregate_new.call_count == 1
elif id_qualifies:
assert mock_save_aggregate_new.call_count == 1
else:
assert mock_save_aggregate.call_count == 1
assert results == {
"primary_hash_calculated": True,
"secondary_hash_calculated": False,
"primary_hash_found": True,
"new_group_created": False,
"event_assigned_to_given_existing_group": True,
"primary_grouphash_existed_already": True,
"primary_grouphash_exists_now": True,
"result_tag_value_for_metrics": "found_primary",
# The rest are moot since no secondary hash was calculated.
"hashes_different": None,
"secondary_hash_found": None,
"secondary_grouphash_existed_already": None,
"secondary_grouphash_exists_now": None,
}
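With the `new_logic_enabled` parameter and its parametrization removed, the remaining tests vary only `in_transition` (plus `secondary_hash_exists` where relevant). Below is a minimal sketch of the slimmed-down shape, mirroring `test_new_group` above and assuming the imports shown at the top of the test module; the leading arguments to the helper are elided by the hunks, so the `event_data` and `project` keyword names are assumptions for illustration, as is the closing assertion.

@django_db_all
@pytest.mark.parametrize(
    "in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ")
)
def test_new_group_sketch(in_transition: bool, default_project: Project):
    # Sketch only: mirrors test_new_group without the removed new_logic_enabled axis.
    results = get_results_from_saving_event(
        event_data={"message": "Dogs are great!"},  # assumed keyword name
        project=default_project,  # assumed keyword name
        primary_config=DEFAULT_GROUPING_CONFIG,
        secondary_config=LEGACY_GROUPING_CONFIG,
        in_transition=in_transition,
    )
    # A primary hash is calculated in every branch of the assertions above,
    # regardless of transition state.
    assert results["primary_hash_calculated"] is True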