diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index 43e5de23fccf61..ccdf6e4c6d1714 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -2411,14 +2411,10 @@ def _calculate_event_grouping(
     Main entrypoint for modifying/enhancing and grouping an event, writes
     hashes back into event payload.
     """
-    load_stacktrace_from_cache = bool(event.org_can_load_stacktrace_from_cache)
     metric_tags: MutableTags = {
         "grouping_config": grouping_config["id"],
         "platform": event.platform or "unknown",
-        "loading_from_cache": load_stacktrace_from_cache,
     }
-    # This will help us differentiate when a transaction uses caching vs not
-    sentry_sdk.set_tag("stacktrace.loaded_from_cache", load_stacktrace_from_cache)
 
     with metrics.timer("event_manager.normalize_stacktraces_for_grouping", tags=metric_tags):
         with sentry_sdk.start_span(op="event_manager.normalize_stacktraces_for_grouping"):
diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py
index 653a07893b5fe7..4448c7149fcc79 100644
--- a/src/sentry/eventstore/models.py
+++ b/src/sentry/eventstore/models.py
@@ -23,7 +23,7 @@
 from django.conf import settings
 from django.utils.encoding import force_str
 
-from sentry import eventtypes, features
+from sentry import eventtypes
 from sentry.db.models import NodeData
 from sentry.grouping.result import CalculatedHashes
 from sentry.interfaces.base import Interface, get_interfaces
@@ -419,11 +419,7 @@ def normalize_stacktraces_for_grouping(self, grouping_config) -> None:
         """
         from sentry.stacktraces.processing import normalize_stacktraces_for_grouping
 
-        normalize_stacktraces_for_grouping(
-            self.data,
-            grouping_config,
-            load_stacktrace_from_cache=self.org_can_load_stacktrace_from_cache,
-        )
+        normalize_stacktraces_for_grouping(self.data, grouping_config)
 
         # We have modified event data, so any cached interfaces have to be reset:
         self.__dict__.pop("interfaces", None)
@@ -491,10 +487,6 @@ def get_span_groupings(
     def organization(self) -> Organization:
         return self.project.organization
 
-    @property
-    def org_can_load_stacktrace_from_cache(self) -> bool:
-        return features.has("organizations:stacktrace-processing-caching", self.organization)
-
     @property
     def version(self) -> str:
         return cast(str, self.data.get("version", "5"))
diff --git a/src/sentry/grouping/enhancer/__init__.py b/src/sentry/grouping/enhancer/__init__.py
index cc92a04396ce39..ba5646f96d6ff9 100644
--- a/src/sentry/grouping/enhancer/__init__.py
+++ b/src/sentry/grouping/enhancer/__init__.py
@@ -142,7 +142,6 @@ def apply_modifications_to_frame(
         platform: str,
         exception_data: dict[str, Any],
         extra_fingerprint: str = "",
-        load_stacktrace_from_cache: bool = False,
     ) -> None:
         """This applies the frame modifications to the frames itself. This does not affect grouping."""
         in_memory_cache: dict[str, str] = {}
@@ -158,12 +157,7 @@ def apply_modifications_to_frame(
             cache_key = f"stacktrace_hash.{stacktrace_fingerprint}"
             use_cache = bool(stacktrace_fingerprint)
             if use_cache:
-                frames_changed = _update_frames_from_cached_values(
-                    frames,
-                    cache_key,
-                    platform,
-                    load_from_cache=load_stacktrace_from_cache,
-                )
+                frames_changed = _update_frames_from_cached_values(frames, cache_key, platform)
                 if frames_changed:
                     logger.info("The frames have been loaded from the cache. Skipping some work.")
                     return
@@ -503,12 +497,11 @@ def visit_quoted_ident(self, node, children):
 
 
 def _update_frames_from_cached_values(
-    frames: Sequence[dict[str, Any]], cache_key: str, platform: str, load_from_cache: bool = False
+    frames: Sequence[dict[str, Any]], cache_key: str, platform: str
 ) -> bool:
     """
     This will update the frames of the stacktrace if it's been cached.
-    Set load_from_cache to True to actually change the frames.
-    Returns if the merged has correctly happened.
+    Returns True if the merge happened correctly.
     """
     frames_changed = False
     changed_frames_values = cache.get(cache_key, {})
@@ -516,13 +509,9 @@ def _update_frames_from_cached_values(
     # This helps tracking changes in the hit/miss ratio of the cache
    metrics.incr(
         f"{DATADOG_KEY}.cache.get",
-        tags={
-            "success": bool(changed_frames_values),
-            "platform": platform,
-            "loading_from_cache": load_from_cache,
-        },
+        tags={"success": bool(changed_frames_values), "platform": platform},
     )
-    if changed_frames_values and load_from_cache:
+    if changed_frames_values:
         try:
             for frame, changed_frame_values in zip(frames, changed_frames_values):
                 if changed_frame_values.get("in_app") is not None:
@@ -545,11 +534,7 @@ def _update_frames_from_cached_values(
 
     metrics.incr(
         f"{DATADOG_KEY}.merged_cached_values",
-        tags={
-            "success": frames_changed,
-            "platform": platform,
-            "loading_from_cache": load_from_cache,
-        },
+        tags={"success": frames_changed, "platform": platform},
     )
 
     return frames_changed
diff --git a/src/sentry/stacktraces/processing.py b/src/sentry/stacktraces/processing.py
index b6587c463f59d3..559e2dece55ccf 100644
--- a/src/sentry/stacktraces/processing.py
+++ b/src/sentry/stacktraces/processing.py
@@ -301,7 +301,7 @@ def _normalize_in_app(stacktrace: Sequence[dict[str, str]]) -> str:
 
 
 def normalize_stacktraces_for_grouping(
-    data: MutableMapping[str, Any], grouping_config=None, load_stacktrace_from_cache: bool = False
+    data: MutableMapping[str, Any], grouping_config=None
 ) -> None:
     """
     Applies grouping enhancement rules and ensure in_app is set on all frames.
@@ -341,11 +341,7 @@ def normalize_stacktraces_for_grouping(
     for frames, stacktrace_container in zip(stacktrace_frames, stacktrace_containers):
         # This call has a caching mechanism when the same stacktrace and rules are used
         grouping_config.enhancements.apply_modifications_to_frame(
-            frames,
-            platform,
-            stacktrace_container,
-            extra_fingerprint=grouping_config.id,
-            load_stacktrace_from_cache=load_stacktrace_from_cache,
+            frames, platform, stacktrace_container, extra_fingerprint=grouping_config.id
         )
 
     # normalize `in_app` values, noting and storing the event's mix of in-app and system frames, so
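
For reference, a minimal sketch of the behavior this change leaves in place: cached per-frame values are merged back whenever a cache entry exists, with no separate load_from_cache gate. This is not Sentry's actual implementation; the in-process cache stub, helper name, and cache key below are assumptions for illustration only.

from typing import Any, Sequence

# Hypothetical in-process stand-in for the real cache backend.
_CACHE: dict[str, list[dict[str, Any]]] = {}


def update_frames_from_cached_values(frames: Sequence[dict[str, Any]], cache_key: str) -> bool:
    """Merge cached per-frame values (e.g. in_app) into the frames; return True if anything changed."""
    changed_frames_values = _CACHE.get(cache_key, [])
    frames_changed = False
    for frame, changed_frame_values in zip(frames, changed_frames_values):
        # Only apply values that were actually stored for this frame.
        if changed_frame_values.get("in_app") is not None:
            frame["in_app"] = changed_frame_values["in_app"]
            frames_changed = True
    return frames_changed


# Usage: a cache hit now always updates the frames.
frames = [{"function": "main", "in_app": None}]
_CACHE["stacktrace_hash.abc123"] = [{"in_app": True}]
assert update_frames_from_cached_values(frames, "stacktrace_hash.abc123")
assert frames[0]["in_app"] is True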