feat(perf-detector-threshold-configuration) Added new thresholds and … (#53460)

For project: [Detector Threshold Configurations](https://www.notion.so/sentry/Detector-Threshold-Configuration-f8cb07e7cceb42388cedb09ea05fc116)
Lowered default values and their effects: [link](https://www.notion.so/sentry/Detector-Dry-Run-Results-40dc7a3e4d8b4b9ea8da90608fe54747)

- We are GA'ing the project with lowered default values for existing thresholds and adding new thresholds.
- Changed the default threshold for Consecutive HTTP to 500ms to match the prod sentry_option value: [query](https://redash.getsentry.net/queries/4546/source)
- Added lowered defaults for N+1 DB duration (iterating down to 50ms), Slow DB Query duration (iterating down to 500ms), and Consecutive HTTP span duration (iterating down to 500ms).
- Updated the Consecutive HTTP Detector to add a new total min time saved threshold with a default of 2000ms (see the sketch after this list).
- Updated the Consecutive HTTP Detector tests to ensure the new total time saved threshold is respected.
- Removed the Extended Consecutive HTTP Detector and its tests.
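
The min time saved gate mirrors the heuristic already used by the Consecutive DB Queries detector: it estimates how much time would be saved if the consecutive HTTP requests ran in parallel. A minimal sketch of that heuristic (illustrative only; `estimated_time_saved_ms` is a hypothetical helper, durations are in ms, and 2000ms is the new default threshold):

def estimated_time_saved_ms(span_durations_ms):
    # Time saved if the consecutive HTTP spans ran in parallel instead of
    # sequentially: total duration minus the single longest span.
    return sum(span_durations_ms) - max(span_durations_ms)

# Three back-to-back 900ms requests: 2700 - 900 = 1800ms saved, which is
# below the 2000ms default, so no issue is created.
assert estimated_time_saved_ms([900, 900, 900]) == 1800

# Five back-to-back 900ms requests: 4500 - 900 = 3600ms saved, which clears
# the default (count and gap thresholds permitting), so the detector flags it.
assert estimated_time_saved_ms([900, 900, 900, 900, 900]) == 3600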

---------

Co-authored-by: Abdullah Khan <abdullahkhan@PG9Y57YDXQ.local>
Abdkhan14 and Abdullah Khan committed Jul 25, 2023
1 parent 4afce06 commit 0597a70
Showing 10 changed files with 219 additions and 435 deletions.
@@ -58,6 +58,7 @@ class ConfigurableThresholds(Enum):
RENDER_BLOCKING_ASSET_FCP_RATIO = "render_blocking_fcp_ratio"
SLOW_DB_QUERY_DURATION = "slow_db_query_duration_threshold"
N_PLUS_API_CALLS_DURATION = "n_plus_one_api_calls_total_duration_threshold"
CONSECUTIVE_HTTP_SPANS_MIN_TIME_SAVED = "consecutive_http_spans_min_time_saved_threshold"


internal_only_project_settings_to_group_map: Dict[str, Type[GroupType]] = {
@@ -84,6 +85,7 @@ class ConfigurableThresholds(Enum):
ConfigurableThresholds.RENDER_BLOCKING_ASSET_FCP_RATIO.value: InternalProjectOptions.RENDER_BLOCKING_ASSET.value,
ConfigurableThresholds.SLOW_DB_QUERY_DURATION.value: InternalProjectOptions.SLOW_DB_QUERY.value,
ConfigurableThresholds.N_PLUS_API_CALLS_DURATION.value: InternalProjectOptions.N_PLUS_ONE_API_CALLS.value,
ConfigurableThresholds.CONSECUTIVE_HTTP_SPANS_MIN_TIME_SAVED.value: InternalProjectOptions.CONSECUTIVE_HTTP_SPANS.value,
}


@@ -125,6 +127,9 @@ class ProjectPerformanceIssueSettingsSerializer(serializers.Serializer):
n_plus_one_api_calls_total_duration_threshold = serializers.IntegerField(
required=False, min_value=100, max_value=TEN_SECONDS # ms
)
consecutive_http_spans_min_time_saved_threshold = serializers.IntegerField(
required=False, min_value=1000, max_value=TEN_SECONDS # ms
)
uncompressed_assets_detection_enabled = serializers.BooleanField(required=False)
consecutive_http_spans_detection_enabled = serializers.BooleanField(required=False)
large_http_payload_detection_enabled = serializers.BooleanField(required=False)
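
For illustration, a minimal standalone sketch of how the new serializer field rejects out-of-range overrides (assumptions: plain Django REST Framework outside a Sentry project, and `TEN_SECONDS` standing in for the module's 10000ms constant):

import django
from django.conf import settings

if not settings.configured:
    # Minimal configuration so the sketch runs as a standalone script.
    settings.configure()
    django.setup()

from rest_framework import serializers

TEN_SECONDS = 10000  # ms; assumed value of the module-level constant

class ThresholdSketchSerializer(serializers.Serializer):
    # Mirrors the new field: overrides outside the 1s-10s range are rejected.
    consecutive_http_spans_min_time_saved_threshold = serializers.IntegerField(
        required=False, min_value=1000, max_value=TEN_SECONDS  # ms
    )

too_low = ThresholdSketchSerializer(
    data={"consecutive_http_spans_min_time_saved_threshold": 500}
)
assert not too_low.is_valid()  # 500ms is below the 1000ms minimum
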
13 changes: 9 additions & 4 deletions src/sentry/options/defaults.py
@@ -1229,12 +1229,12 @@
)
register(
"performance.issues.n_plus_one_db.duration_threshold",
default=100.0,
default=90.0,
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
register(
"performance.issues.slow_db_query.duration_threshold",
default=1000.0, # ms
default=900.0, # ms
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
register(
@@ -1259,7 +1259,7 @@
)
register(
"performance.issues.consecutive_http.max_duration_between_spans",
default=500,
default=500, # ms
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
register(
@@ -1269,7 +1269,12 @@
)
register(
"performance.issues.consecutive_http.span_duration_threshold",
default=1000,
default=900, # ms
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
register(
"performance.issues.consecutive_http.min_time_saved_threshold",
default=2000, # ms
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
register(
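
For reference, a minimal sketch of reading the newly registered option at runtime (assuming the `sentry.options` accessor that performance_detection.py uses later in this diff; the key matches the register() call above):

from sentry import options

# Returns the registered default (2000ms) unless the value has been
# overridden, e.g. through the options automator.
min_time_saved_ms = options.get(
    "performance.issues.consecutive_http.min_time_saved_threshold"
)
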
2 changes: 0 additions & 2 deletions src/sentry/utils/performance_issues/base.py
@@ -37,7 +37,6 @@ class DetectorType(Enum):
N_PLUS_ONE_API_CALLS = "n_plus_one_api_calls"
CONSECUTIVE_DB_OP = "consecutive_db"
CONSECUTIVE_HTTP_OP = "consecutive_http"
CONSECUTIVE_HTTP_OP_EXTENDED = "consecutive_http_ext"
LARGE_HTTP_PAYLOAD = "large_http_payload"
FILE_IO_MAIN_THREAD = "file_io_main_thread"
M_N_PLUS_ONE_DB = "m_n_plus_one_db"
@@ -57,7 +56,6 @@ class DetectorType(Enum):
DetectorType.M_N_PLUS_ONE_DB: PerformanceMNPlusOneDBQueriesGroupType,
DetectorType.UNCOMPRESSED_ASSETS: PerformanceUncompressedAssetsGroupType,
DetectorType.CONSECUTIVE_HTTP_OP: PerformanceConsecutiveHTTPQueriesGroupType,
DetectorType.CONSECUTIVE_HTTP_OP_EXTENDED: PerformanceConsecutiveHTTPQueriesGroupType,
DetectorType.DB_MAIN_THREAD: PerformanceDBMainThreadGroupType,
DetectorType.LARGE_HTTP_PAYLOAD: PerformanceLargeHTTPPayloadGroupType,
DetectorType.HTTP_OVERHEAD: PerformanceHTTPOverheadGroupType,
src/sentry/utils/performance_issues/detectors/consecutive_http_detector.py
@@ -50,6 +50,10 @@ def visit_span(self, span: Span) -> None:
if not span_id or not self._is_eligible_http_span(span):
return

span_duration = get_span_duration(span).total_seconds() * 1000
if span_duration < self.settings.get("span_duration_threshold"):
return

if self._overlaps_last_span(span):
self._validate_and_store_performance_problem()
self._reset_variables()
@@ -63,11 +67,12 @@ def _validate_and_store_performance_problem(self):
exceeds_count_threshold = len(self.consecutive_http_spans) >= self.settings.get(
"consecutive_count_threshold"
)
exceeds_span_duration_threshold = all(
get_span_duration(span).total_seconds() * 1000
> self.settings.get("span_duration_threshold")
for span in self.consecutive_http_spans
)

exceeds_min_time_saved_duration = False
if self.consecutive_http_spans:
exceeds_min_time_saved_duration = self._calculate_time_saved() >= self.settings.get(
"min_time_saved"
)

subceeds_duration_between_spans_threshold = all(
get_duration_between_spans(
Expand All @@ -79,11 +84,17 @@ def _validate_and_store_performance_problem(self):

if (
exceeds_count_threshold
and exceeds_span_duration_threshold
and subceeds_duration_between_spans_threshold
and exceeds_min_time_saved_duration
):
self._store_performance_problem()

def _calculate_time_saved(self) -> float:
total_time = get_total_span_duration(self.consecutive_http_spans)
max_span_duration = get_max_span_duration(self.consecutive_http_spans)

return total_time - max_span_duration

def _store_performance_problem(self) -> None:
fingerprint = self._fingerprint()
offender_span_ids = [span.get("span_id", None) for span in self.consecutive_http_spans]
@@ -166,73 +177,3 @@ def is_creation_allowed_for_organization(self, organization: Organization) -> bool:

def is_creation_allowed_for_project(self, project: Project) -> bool:
return self.settings["detection_enabled"]


class ConsecutiveHTTPSpanDetectorExtended(ConsecutiveHTTPSpanDetector):
"""
Detector goals:
- Extend Consecutive HTTP Span Detector to mimic detection using thresholds from
- Consecutive DB Queries Detector.
"""

type = DetectorType.CONSECUTIVE_HTTP_OP_EXTENDED
settings_key = DetectorType.CONSECUTIVE_HTTP_OP_EXTENDED

def visit_span(self, span: Span) -> None:
if is_event_from_browser_javascript_sdk(self.event()):
return

span_id = span.get("span_id", None)

if not span_id or not self._is_eligible_http_span(span):
return

span_duration = get_span_duration(span).total_seconds() * 1000
if span_duration < self.settings.get("span_duration_threshold"):
return

if self._overlaps_last_span(span):
self._validate_and_store_performance_problem()
self._reset_variables()

self._add_problem_span(span)

def _validate_and_store_performance_problem(self):
exceeds_count_threshold = len(self.consecutive_http_spans) >= self.settings.get(
"consecutive_count_threshold"
)

exceeds_min_time_saved_duration = False
if self.consecutive_http_spans:
exceeds_min_time_saved_duration = self._calculate_time_saved() >= self.settings.get(
"min_time_saved"
)

subceeds_duration_between_spans_threshold = all(
get_duration_between_spans(
self.consecutive_http_spans[idx - 1], self.consecutive_http_spans[idx]
)
< self.settings.get("max_duration_between_spans")
for idx in range(1, len(self.consecutive_http_spans))
)

if (
exceeds_count_threshold
and subceeds_duration_between_spans_threshold
and exceeds_min_time_saved_duration
):
self._store_performance_problem()

def _calculate_time_saved(self) -> float:
total_time = get_total_span_duration(self.consecutive_http_spans)
max_span_duration = get_max_span_duration(self.consecutive_http_spans)

return total_time - max_span_duration

def is_creation_allowed_for_organization(self, organization: Organization) -> bool:
# Only collecting metrics.
return False

def is_creation_allowed_for_project(self, project: Project) -> bool:
# Only collecting metrics.
return False
14 changes: 4 additions & 10 deletions src/sentry/utils/performance_issues/performance_detection.py
@@ -14,9 +14,6 @@
from sentry.utils import metrics
from sentry.utils.event import is_event_from_browser_javascript_sdk
from sentry.utils.event_frames import get_sdk_name
from sentry.utils.performance_issues.detectors.consecutive_http_detector import (
ConsecutiveHTTPSpanDetectorExtended,
)
from sentry.utils.safe import get_path

from .base import DetectorType, PerformanceDetector
@@ -159,6 +156,9 @@ def get_merged_settings(project_id: Optional[int] = None) -> Dict[str | Any, Any
"consecutive_http_spans_span_duration_threshold": options.get(
"performance.issues.consecutive_http.span_duration_threshold"
),
"consecutive_http_spans_min_time_saved_threshold": options.get(
"performance.issues.consecutive_http.min_time_saved_threshold"
),
"large_http_payload_size_threshold": options.get(
"performance.issues.large_http_payload.size_threshold"
),
@@ -287,18 +287,13 @@ def get_detection_settings(project_id: Optional[int] = None) -> Dict[DetectorTyp
"span_duration_threshold": settings[
"consecutive_http_spans_span_duration_threshold"
], # ms
"min_time_saved": settings["consecutive_http_spans_min_time_saved_threshold"], # ms
"consecutive_count_threshold": settings["consecutive_http_spans_count_threshold"],
"max_duration_between_spans": settings[
"consecutive_http_spans_max_duration_between_spans"
], # ms
"detection_enabled": settings["consecutive_http_spans_detection_enabled"],
},
DetectorType.CONSECUTIVE_HTTP_OP_EXTENDED: {
"span_duration_threshold": 500, # ms
"min_time_saved": 2000, # time saved by running all queries in parallel
"consecutive_count_threshold": 3,
"max_duration_between_spans": 1000, # ms
},
DetectorType.LARGE_HTTP_PAYLOAD: {
"payload_size_threshold": settings["large_http_payload_size_threshold"],
"detection_enabled": settings["large_http_payload_detection_enabled"],
Expand All @@ -320,7 +315,6 @@ def _detect_performance_problems(
detectors: List[PerformanceDetector] = [
ConsecutiveDBSpanDetector(detection_settings, data),
ConsecutiveHTTPSpanDetector(detection_settings, data),
ConsecutiveHTTPSpanDetectorExtended(detection_settings, data),
DBMainThreadDetector(detection_settings, data),
SlowDBQueryDetector(detection_settings, data),
RenderBlockingAssetSpanDetector(detection_settings, data),
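
For context, a minimal sketch of how the merged settings reach the detector (simplified and hedged: `event` is assumed to be a transaction event dict with a "spans" list, and the `on_complete()`/`stored_problems` API is assumed from the shared PerformanceDetector base class):

settings = get_detection_settings(project_id=None)  # option defaults merged with project overrides
detector = ConsecutiveHTTPSpanDetector(settings, event)

# Feed each span through the detector; any detected issues accumulate in
# stored_problems, keyed by fingerprint.
for span in event.get("spans", []):
    detector.visit_span(span)
detector.on_complete()

problems = list(detector.stored_problems.values())
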
@@ -92,6 +92,7 @@ def test_get_project_options_overrides_threshold_defaults(self, get_value):
"performance.issues.uncompressed_asset.size_threshold": 200000,
"performance.issues.consecutive_db.min_time_saved_threshold": 300,
"performance.issues.n_plus_one_api_calls.total_duration": 300,
"performance.issues.consecutive_http.min_time_saved_threshold": 2000,
}
):
with self.feature(PERFORMANCE_ISSUE_FEATURES):
@@ -110,6 +111,7 @@ def test_get_project_options_overrides_threshold_defaults(self, get_value):
assert response.data["uncompressed_asset_size_threshold"] == 200000
assert response.data["consecutive_db_min_time_saved_threshold"] == 300
assert response.data["n_plus_one_api_calls_total_duration_threshold"] == 300
assert response.data["consecutive_http_spans_min_time_saved_threshold"] == 2000

get_value.return_value = {
"n_plus_one_db_duration_threshold": 10000,
@@ -122,6 +124,7 @@ def test_get_project_options_overrides_threshold_defaults(self, get_value):
"file_io_on_main_thread_duration_threshold": 33,
"consecutive_db_min_time_saved_threshold": 5000,
"n_plus_one_api_calls_total_duration_threshold": 500,
"consecutive_http_spans_min_time_saved_threshold": 1000,
}

with self.feature(PERFORMANCE_ISSUE_FEATURES):
@@ -140,6 +143,7 @@ def test_get_project_options_overrides_threshold_defaults(self, get_value):
assert response.data["file_io_on_main_thread_duration_threshold"] == 33
assert response.data["consecutive_db_min_time_saved_threshold"] == 5000
assert response.data["n_plus_one_api_calls_total_duration_threshold"] == 500
assert response.data["consecutive_http_spans_min_time_saved_threshold"] == 1000

def test_get_returns_error_without_feature_enabled(self):
with self.feature({}):