From a05bfaf6ca602674c8c8880f363b0a1ab793d24d Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Mon, 17 Jul 2023 08:43:27 -0400 Subject: [PATCH 01/67] feat(perf): Add http/1.1 overhead detector (#52893) ### Summary Adds a new detector (flags aren't going to be flipped until after audit). Servers using http/1.1 might be causing queueing behaviour on the browser when the connections reach a certain amount. This detector roughly detects overlapping spans against the same location (connections are limited per host), assuming that queue depth is causing monotonically increasing requestStart. #### Other - We'll likely need to change the network timing (`requestStart`) this currently targets, but sdk needs to be cut before that. - Our sdk collects absolute paths, so we skip url parsing in that case and use location '/' - Didn't want to have `/1.1` in filenames or enums so change the file/vars to `http overhead`. - We'll try a `500` delay threshold for now, meaning the peak of the increasing times should be 500ms. I've found existing real events that fire on this. 
--------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- .../project_performance_issue_settings.py | 1 + src/sentry/features/__init__.py | 1 + src/sentry/issues/grouptype.py | 8 +- src/sentry/options/defaults.py | 19 ++ src/sentry/projectoptions/defaults.py | 1 + src/sentry/utils/performance_issues/base.py | 10 + .../performance_issues/detectors/__init__.py | 1 + .../detectors/consecutive_http_detector.py | 9 +- .../detectors/http_overhead_detector.py | 189 ++++++++++++++++++ .../performance_detection.py | 9 + .../test_http_overhead_detector.py | 180 +++++++++++++++++ .../test_performance_detection.py | 42 ++-- 12 files changed, 436 insertions(+), 34 deletions(-) create mode 100644 src/sentry/utils/performance_issues/detectors/http_overhead_detector.py create mode 100644 tests/sentry/utils/performance_issues/test_http_overhead_detector.py diff --git a/src/sentry/api/endpoints/project_performance_issue_settings.py b/src/sentry/api/endpoints/project_performance_issue_settings.py index 8110dde3b21e2b..1986b71fe7e8b5 100644 --- a/src/sentry/api/endpoints/project_performance_issue_settings.py +++ b/src/sentry/api/endpoints/project_performance_issue_settings.py @@ -91,6 +91,7 @@ class ProjectPerformanceIssueSettingsSerializer(serializers.Serializer): consecutive_db_queries_detection_enabled = serializers.BooleanField(required=False) large_render_blocking_asset_detection_enabled = serializers.BooleanField(required=False) slow_db_queries_detection_enabled = serializers.BooleanField(required=False) + http_overhead_detection_enabled = serializers.BooleanField(required=False) @region_silo_endpoint diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 3bc6e8457743f2..b6e0959c612d62 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -130,6 +130,7 @@ default_manager.add("organizations:performance-n-plus-one-api-calls-detector", OrganizationFeature, 
FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:performance-issues-compressed-assets-detector", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:performance-issues-render-blocking-assets-detector", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) +default_manager.add("organizations:performance-issues-http-overhead-detector", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:performance-issues-m-n-plus-one-db-detector", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:performance-issues-dev", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:project-performance-settings-admin", OrganizationFeature, FeatureHandlerStrategy.REMOTE) diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index a95821ba8ae0ae..81eb2987c2e614 100644 --- a/src/sentry/issues/grouptype.py +++ b/src/sentry/issues/grouptype.py @@ -290,9 +290,15 @@ class PerformanceLargeHTTPPayloadGroupType(PerformanceGroupTypeDefaults, GroupTy category = GroupCategory.PERFORMANCE.value -# 2000 was ProfileBlockingFunctionMainThreadType +@dataclass(frozen=True) +class PerformanceHTTPOverheadGroupType(PerformanceGroupTypeDefaults, GroupType): + type_id = 1016 + slug = "performance_http_overhead" + description = "HTTP/1.1 Overhead" + category = GroupCategory.PERFORMANCE.value +# 2000 was ProfileBlockingFunctionMainThreadType @dataclass(frozen=True) class ProfileFileIOGroupType(GroupType): type_id = 2001 diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 6848830ff7ba43..8dd2e3802e219b 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -1203,6 +1203,20 @@ register( "performance.issues.m_n_plus_one_db.ga-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE ) +register( + "performance.issues.http_overhead.problem-creation", + default=0.0, + 
flags=FLAG_AUTOMATOR_MODIFIABLE, +) +register( + "performance.issues.http_overhead.la-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE +) +register( + "performance.issues.http_overhead.ea-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE +) +register( + "performance.issues.http_overhead.ga-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE +) # System-wide options for default performance detection settings for any org opted into the performance-issues-ingest feature. Meant for rollout. @@ -1284,6 +1298,11 @@ default=100, flags=FLAG_AUTOMATOR_MODIFIABLE, ) # ms +register( + "performance.issues.http_overhead.http_request_delay_threshold", + default=500, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) # ms # Dynamic Sampling system-wide options # Size of the sliding window used for dynamic sampling. It is defaulted to 24 hours. diff --git a/src/sentry/projectoptions/defaults.py b/src/sentry/projectoptions/defaults.py index c1a05d6b4c730b..051bd1d3422c97 100644 --- a/src/sentry/projectoptions/defaults.py +++ b/src/sentry/projectoptions/defaults.py @@ -113,6 +113,7 @@ "consecutive_db_queries_detection_enabled": True, "large_render_blocking_asset_detection_enabled": True, "slow_db_queries_detection_enabled": True, + "http_overhead_detection_enabled": True, } # A dict containing all the specific detection thresholds and rates. 
diff --git a/src/sentry/utils/performance_issues/base.py b/src/sentry/utils/performance_issues/base.py index 8959d178a8c18c..db18d819a93c5d 100644 --- a/src/sentry/utils/performance_issues/base.py +++ b/src/sentry/utils/performance_issues/base.py @@ -15,6 +15,7 @@ PerformanceConsecutiveHTTPQueriesGroupType, PerformanceDBMainThreadGroupType, PerformanceFileIOMainThreadGroupType, + PerformanceHTTPOverheadGroupType, PerformanceLargeHTTPPayloadGroupType, PerformanceMNPlusOneDBQueriesGroupType, PerformanceNPlusOneAPICallsGroupType, @@ -43,6 +44,7 @@ class DetectorType(Enum): M_N_PLUS_ONE_DB = "m_n_plus_one_db" UNCOMPRESSED_ASSETS = "uncompressed_assets" DB_MAIN_THREAD = "db_main_thread" + HTTP_OVERHEAD = "http_overhead" DETECTOR_TYPE_TO_GROUP_TYPE = { @@ -60,6 +62,7 @@ class DetectorType(Enum): DetectorType.CONSECUTIVE_HTTP_OP_EXTENDED: PerformanceConsecutiveHTTPQueriesGroupType, DetectorType.DB_MAIN_THREAD: PerformanceDBMainThreadGroupType, DetectorType.LARGE_HTTP_PAYLOAD: PerformanceLargeHTTPPayloadGroupType, + DetectorType.HTTP_OVERHEAD: PerformanceHTTPOverheadGroupType, } @@ -77,6 +80,7 @@ class DetectorType(Enum): DetectorType.RENDER_BLOCKING_ASSET_SPAN: "performance.issues.render_blocking_assets.problem-creation", DetectorType.M_N_PLUS_ONE_DB: "performance.issues.m_n_plus_one_db.problem-creation", DetectorType.DB_MAIN_THREAD: "performance.issues.db_main_thread.problem-creation", + DetectorType.HTTP_OVERHEAD: "performance.issues.http_overhead.problem-creation", } @@ -161,6 +165,12 @@ def is_event_eligible(cls, event, project: Optional[Project] = None) -> bool: return True +def does_overlap_previous_span(previous_span: Span, current_span: Span): + previous_span_ends = timedelta(seconds=previous_span.get("timestamp", 0)) + current_span_begins = timedelta(seconds=current_span.get("start_timestamp", 0)) + return previous_span_ends > current_span_begins + + def get_span_duration(span: Span) -> timedelta: return timedelta(seconds=span.get("timestamp", 0)) - timedelta( 
seconds=span.get("start_timestamp", 0) diff --git a/src/sentry/utils/performance_issues/detectors/__init__.py b/src/sentry/utils/performance_issues/detectors/__init__.py index da655d9188aeb7..393b859d50643f 100644 --- a/src/sentry/utils/performance_issues/detectors/__init__.py +++ b/src/sentry/utils/performance_issues/detectors/__init__.py @@ -1,5 +1,6 @@ from .consecutive_db_detector import ConsecutiveDBSpanDetector # NOQA from .consecutive_http_detector import ConsecutiveHTTPSpanDetector # NOQA +from .http_overhead_detector import HTTPOverheadDetector # NOQA from .io_main_thread_detector import DBMainThreadDetector, FileIOMainThreadDetector # NOQA from .large_payload_detector import LargeHTTPPayloadDetector # NOQA from .mn_plus_one_db_span_detector import MNPlusOneDBSpanDetector # NOQA diff --git a/src/sentry/utils/performance_issues/detectors/consecutive_http_detector.py b/src/sentry/utils/performance_issues/detectors/consecutive_http_detector.py index a717780510a8bd..575298c3e976cc 100644 --- a/src/sentry/utils/performance_issues/detectors/consecutive_http_detector.py +++ b/src/sentry/utils/performance_issues/detectors/consecutive_http_detector.py @@ -1,7 +1,5 @@ from __future__ import annotations -from datetime import timedelta - from sentry import features from sentry.issues.grouptype import PerformanceConsecutiveHTTPQueriesGroupType from sentry.issues.issue_occurrence import IssueEvidence @@ -16,6 +14,7 @@ from ..base import ( DetectorType, PerformanceDetector, + does_overlap_previous_span, fingerprint_http_spans, get_duration_between_spans, get_notification_attachment_body, @@ -128,12 +127,8 @@ def _store_performance_problem(self) -> None: def _overlaps_last_span(self, span: Span) -> bool: if len(self.consecutive_http_spans) == 0: return False - last_span = self.consecutive_http_spans[-1] - - last_span_ends = timedelta(seconds=last_span.get("timestamp", 0)) - current_span_begins = timedelta(seconds=span.get("start_timestamp", 0)) - return last_span_ends > 
current_span_begins + return does_overlap_previous_span(last_span, span) def _reset_variables(self) -> None: self.consecutive_http_spans = [] diff --git a/src/sentry/utils/performance_issues/detectors/http_overhead_detector.py b/src/sentry/utils/performance_issues/detectors/http_overhead_detector.py new file mode 100644 index 00000000000000..91401a6eb5fba3 --- /dev/null +++ b/src/sentry/utils/performance_issues/detectors/http_overhead_detector.py @@ -0,0 +1,189 @@ +from __future__ import annotations + +import urllib.parse +from collections import defaultdict +from dataclasses import dataclass +from typing import Optional + +from sentry import features +from sentry.issues.grouptype import PerformanceHTTPOverheadGroupType +from sentry.issues.issue_occurrence import IssueEvidence +from sentry.models import Organization, Project + +from ..base import ( + DetectorType, + PerformanceDetector, + does_overlap_previous_span, + get_notification_attachment_body, + get_span_evidence_value, +) +from ..performance_problem import PerformanceProblem +from ..types import Span + + +@dataclass +class ProblemIndicator: + """ + Keep span data that will be used to store the problem together. + Has a monotonic queue depth to know if spans hit the parallel limit without walking all spans again. + """ + + span: Span + delay: float + queue_depth: int = 0 + + +class HTTPOverheadDetector(PerformanceDetector): + """ + Detect HTTP/1.1 overhead in http spans outside of response time for that endpoint using + Browser reporting of PerformanceNavigationTiming. Parent-child span hierarchy + is ignored as there is an external limit (browser http/1.1 request limit) solely based on time. + + [-------- transaction -----------] + [0] https://service.io/api/book/1 - request delay ~0ms + ----[1] https://service.io/api/book/2 - request delay ~0ms + ----[2] https://service.io/api/book/3 - request delay ~0ms + ... + ----[5] https://service.io/api/book/6 - request delay 400ms - Hit ~6 connection limit. 
+ ----[6] https://service.io/api/book/7 - request delay 600ms - Over threshold, triggers detection. + ... + """ + + __slots__ = "stored_problems" + + type = DetectorType.HTTP_OVERHEAD + settings_key = DetectorType.HTTP_OVERHEAD + + def init(self): + self.stored_problems: dict[str, PerformanceProblem] = {} + self.location_to_indicators = defaultdict(list) + + def visit_span(self, span: Span) -> None: + span_data = span.get("data", {}) + if not self._is_span_eligible(span) or not span_data: + return + + url = span_data.get("url", "") + span_start = span.get("start_timestamp", 0) * 1000 + request_start = span_data.get("http.request.request_start", 0) * 1000 + + if not url or not span_start or not request_start: + return + + if url.startswith("/"): + location = "/" + else: + parsed_url = urllib.parse.urlparse(url) + location = parsed_url.netloc + + if not location: + return + + request_delay = request_start - span_start + + if request_delay < 0: + # shouldn't be possible, but these values are browser reported + return + + indicators = self.location_to_indicators[location] + recent_beginning_of_chain = next( + filter(lambda i: i.queue_depth == 0, reversed(indicators)), None + ) + recent_end_of_chain = indicators[-1] if indicators else None + + if not recent_beginning_of_chain: + self.location_to_indicators[location] += [ProblemIndicator(span, request_delay, 0)] + return + + previous_delay = recent_beginning_of_chain.delay + previous_span = recent_beginning_of_chain.span + previous_monotonic = recent_end_of_chain.queue_depth if recent_end_of_chain else 0 + + is_overlapping = does_overlap_previous_span(previous_span, span) + new_monotonic = ( + previous_monotonic + 1 if request_delay >= previous_delay and is_overlapping else 0 + ) + + self.location_to_indicators[location] += [ + ProblemIndicator(span, request_delay, new_monotonic) + ] + + def _is_span_eligible(self, span: Span) -> bool: + span_op = span.get("op", None) + span_data = span.get("data", {}) + if not 
span_data: + return False + protocol_version = span_data.get("network.protocol.version", None) + + if not span_op or not span_op == "http.client" or not protocol_version == "1.1": + return False + return True + + def _store_performance_problem(self, location: str) -> None: + delay_threshold = self.settings.get("http_request_delay_threshold") + + # This isn't a threshold, it reduces noise in offending spans. + indicators = [ + indicator + for indicator in self.location_to_indicators[location] + if indicator.delay > 100 + ] + + location_spans = [indicator.span for indicator in indicators] + meets_min_queued = any( + indicator.queue_depth >= 5 for indicator in indicators + ) # Browsers queue past 4-6 connections. + exceeds_delay_threshold = any(indicator.delay > delay_threshold for indicator in indicators) + + if not exceeds_delay_threshold or not meets_min_queued or not location_spans: + return + + fingerprint = f"1-{PerformanceHTTPOverheadGroupType.type_id}-{location}" + example_span = location_spans[0] + desc: str = example_span.get("description", None) + + location_span_ids = [span.get("span_id", None) for span in location_spans] + + self.stored_problems[fingerprint] = PerformanceProblem( + fingerprint, + "http", + desc=desc, + type=PerformanceHTTPOverheadGroupType, + cause_span_ids=[], + parent_span_ids=None, + offender_span_ids=location_span_ids, + evidence_data={ + "parent_span_ids": [], + "cause_span_ids": [], + "offender_span_ids": location_span_ids, + "op": "http", + "transaction_name": self._event.get("transaction", ""), + "repeating_spans": get_span_evidence_value(example_span), + "repeating_spans_compact": get_span_evidence_value(example_span, include_op=False), + "num_repeating_spans": str(len(location_spans)), + }, + evidence_display=[ + IssueEvidence( + name="Offending Spans", + value=get_notification_attachment_body( + "http", + desc, + ), + important=True, + ) + ], + ) + + def on_complete(self) -> None: + for location in self.location_to_indicators: 
+ self._store_performance_problem(location) + + def is_creation_allowed_for_organization(self, organization: Optional[Organization]) -> bool: + return features.has( + "organizations:performance-issues-http-overhead-detector", + organization, + actor=None, + ) + + def is_creation_allowed_for_project(self, project: Project) -> bool: + return self.settings["detection_enabled"] diff --git a/src/sentry/utils/performance_issues/performance_detection.py b/src/sentry/utils/performance_issues/performance_detection.py index 139a0ec6d9da9d..91f11597723ea5 100644 --- a/src/sentry/utils/performance_issues/performance_detection.py +++ b/src/sentry/utils/performance_issues/performance_detection.py @@ -28,6 +28,7 @@ ConsecutiveHTTPSpanDetector, DBMainThreadDetector, FileIOMainThreadDetector, + HTTPOverheadDetector, LargeHTTPPayloadDetector, MNPlusOneDBSpanDetector, NPlusOneAPICallsDetector, @@ -179,6 +180,9 @@ def get_merged_settings(project_id: Optional[int] = None) -> Dict[str | Any, Any "consecutive_db_min_time_saved_threshold": options.get( "performance.issues.consecutive_db.min_time_saved_threshold" ), + "http_request_delay_threshold": options.get( + "performance.issues.http_overhead.http_request_delay_threshold" + ), } default_project_settings = ( @@ -305,6 +309,10 @@ def get_detection_settings(project_id: Optional[int] = None) -> Dict[DetectorTyp "payload_size_threshold": settings["large_http_payload_size_threshold"], "detection_enabled": settings["large_http_payload_detection_enabled"], }, + DetectorType.HTTP_OVERHEAD: { + "http_request_delay_threshold": settings["http_request_delay_threshold"], + "detection_enabled": settings["http_overhead_detection_enabled"], + }, } @@ -330,6 +338,7 @@ def _detect_performance_problems( MNPlusOneDBSpanDetector(detection_settings, data), UncompressedAssetSpanDetector(detection_settings, data), LargeHTTPPayloadDetector(detection_settings, data), + HTTPOverheadDetector(detection_settings, data), ] for detector in detectors: diff --git 
a/tests/sentry/utils/performance_issues/test_http_overhead_detector.py b/tests/sentry/utils/performance_issues/test_http_overhead_detector.py new file mode 100644 index 00000000000000..d4a5d97256b02f --- /dev/null +++ b/tests/sentry/utils/performance_issues/test_http_overhead_detector.py @@ -0,0 +1,180 @@ +from __future__ import annotations + +from typing import Any + +import pytest + +from sentry.issues.grouptype import PerformanceHTTPOverheadGroupType +from sentry.testutils import TestCase +from sentry.testutils.performance_issues.event_generators import ( + PROJECT_ID, + create_span, + modify_span_start, +) +from sentry.testutils.silo import region_silo_test +from sentry.utils.performance_issues.detectors.http_overhead_detector import HTTPOverheadDetector +from sentry.utils.performance_issues.performance_detection import ( + get_detection_settings, + run_detector_on_data, +) +from sentry.utils.performance_issues.performance_problem import PerformanceProblem + + +def overhead_span(duration: float, request_start: float, url: str) -> dict[str, Any]: + return modify_span_start( + create_span( + "http.client", + desc=url, + duration=duration, + data={ + "url": url, + "network.protocol.version": "1.1", + "http.request.request_start": request_start / 1000.0, + }, + ), + 1, + ) + + +def _valid_http_overhead_event(url: str) -> dict[str, Any]: + return { + "event_id": "a" * 16, + "project": PROJECT_ID, + "spans": [ + overhead_span(1000, 100, url), + overhead_span(1000, 200, url), + overhead_span(1000, 300, url), + overhead_span(1000, 400, url), + overhead_span(1000, 500, url), + overhead_span(1000, 600, url), + ], + "contexts": { + "trace": { + "span_id": "c" * 16, + } + }, + "transaction": url, + } + + +def find_problems(settings, event: dict[str, Any]) -> list[PerformanceProblem]: + detector = HTTPOverheadDetector(settings, event) + run_detector_on_data(detector, event) + return list(detector.stored_problems.values()) + + +@region_silo_test +@pytest.mark.django_db 
+class HTTPOverheadDetectorTest(TestCase): + def setUp(self): + super().setUp() + self._settings = get_detection_settings() + + def find_problems(self, event): + return find_problems(self._settings, event) + + def test_detects_http_overhead(self): + event = _valid_http_overhead_event("/api/endpoint/123") + assert self.find_problems(event) == [ + PerformanceProblem( + fingerprint="1-1016-/", + op="http", + desc="/api/endpoint/123", + type=PerformanceHTTPOverheadGroupType, + parent_span_ids=None, + cause_span_ids=[], + offender_span_ids=[ + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + ], + evidence_data={ + "op": "http", + "parent_span_ids": [], + "cause_span_ids": [], + "offender_span_ids": [ + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + ], + }, + evidence_display=[], + ) + ] + + def test_does_not_detect_overlap_limit(self): + event = _valid_http_overhead_event("/api/endpoint/123") + + event["spans"] = event["spans"][:5] + assert self.find_problems(event) == [] + + def test_does_not_detect_under_delay_threshold(self): + url = "/api/endpoint/123" + event = _valid_http_overhead_event(url) + + event["spans"] = [ + overhead_span(1000, 1, url), + overhead_span(1000, 2, url), + overhead_span(1000, 3, url), + overhead_span(1000, 4, url), + overhead_span(1000, 5, url), + overhead_span(1000, 501, url), # Request start is at 1ms. 
+ ] + assert self.find_problems(event) == [] + + def test_detect_non_http_1_1(self): + url = "/api/endpoint/123" + event = _valid_http_overhead_event(url) + + trigger_span = overhead_span(1000, 502, url) + event["spans"] = [ + overhead_span(1000, 1, url), + overhead_span(1000, 2, url), + overhead_span(1000, 3, url), + overhead_span(1000, 4, url), + overhead_span(1000, 5, url), + trigger_span, + ] + + assert len(self.find_problems(event)) == 1 + trigger_span["data"]["network.protocol.version"] = "h3" + + assert len(self.find_problems(event)) == 0 + + def test_detect_other_location(self): + url = "https://example.com/api/endpoint/123" + event = _valid_http_overhead_event(url) + assert self.find_problems(event) == [ + PerformanceProblem( + fingerprint="1-1016-example.com", + op="http", + desc="/api/endpoint/123", + type=PerformanceHTTPOverheadGroupType, + parent_span_ids=None, + cause_span_ids=[], + offender_span_ids=[ + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + ], + evidence_data={ + "op": "http", + "parent_span_ids": [], + "cause_span_ids": [], + "offender_span_ids": [ + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + "bbbbbbbbbbbbbbbb", + ], + }, + evidence_display=[], + ) + ] diff --git a/tests/sentry/utils/performance_issues/test_performance_detection.py b/tests/sentry/utils/performance_issues/test_performance_detection.py index 4a00b960667dd3..aefbd3a3c0f65f 100644 --- a/tests/sentry/utils/performance_issues/test_performance_detection.py +++ b/tests/sentry/utils/performance_issues/test_performance_detection.py @@ -467,32 +467,22 @@ def test_reports_metrics_on_uncompressed_assets(self, incr_mock): ) in incr_mock.mock_calls ) - assert ( - call( - "performance.performance_issue.detected", - instance="True", - tags={ - "sdk_name": "sentry.javascript.react", - "consecutive_db": False, - "large_http_payload": False, - "consecutive_http": False, - 
"consecutive_http_ext": False, - "slow_db_query": False, - "render_blocking_assets": False, - "n_plus_one_db": False, - "n_plus_one_db_ext": False, - "file_io_main_thread": False, - "db_main_thread": False, - "n_plus_one_api_calls": False, - "n_plus_one_api_calls_ext": False, - "m_n_plus_one_db": False, - "uncompressed_assets": True, - "browser_name": "Chrome", - "is_early_adopter": False, - }, - ) - in incr_mock.mock_calls - ) + detection_calls = [ + call + for call in incr_mock.mock_calls + if call.args[0] == "performance.performance_issue.detected" + ] + assert len(detection_calls) == 1 + tags = detection_calls[0].kwargs["tags"] + + assert tags["uncompressed_assets"] + assert tags["sdk_name"] == "sentry.javascript.react" + assert not tags["is_early_adopter"] + assert tags["browser_name"] == "Chrome" + + # Ensure all other detections are set to false in tags + pre_checked_keys = ["sdk_name", "is_early_adopter", "browser_name", "uncompressed_assets"] + assert not any([v for k, v in tags.items() if k not in pre_checked_keys]) @region_silo_test From d4a1abda1f3e75277c65a97e8c44ba77ef52165e Mon Sep 17 00:00:00 2001 From: Lukas Stracke Date: Mon, 17 Jul 2023 15:02:01 +0200 Subject: [PATCH 02/67] ref(getting-started-docs): Migrate Node doc to sentry main repo (#52875) Migrate the basic Node (no framework) onboarding/getting started doc to to the Sentry main repo --- .../gettingStartedDoc/sdkDocumentation.tsx | 1 + .../app/gettingStartedDocs/node/node.spec.tsx | 20 ++++ static/app/gettingStartedDocs/node/node.tsx | 95 +++++++++++++++++++ 3 files changed, 116 insertions(+) create mode 100644 static/app/gettingStartedDocs/node/node.spec.tsx create mode 100644 static/app/gettingStartedDocs/node/node.tsx diff --git a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx index 23ecca63a13e75..e9efcf0b1a877e 100644 --- 
a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx @@ -29,6 +29,7 @@ export const migratedDocs = [ 'minidump', 'native', 'native-qt', + 'node', ]; type SdkDocumentationProps = { diff --git a/static/app/gettingStartedDocs/node/node.spec.tsx b/static/app/gettingStartedDocs/node/node.spec.tsx new file mode 100644 index 00000000000000..f66d46d92ca7c5 --- /dev/null +++ b/static/app/gettingStartedDocs/node/node.spec.tsx @@ -0,0 +1,20 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step'; + +import {GettingStartedWithNode, steps} from './node'; + +describe('GettingStartedWithNode', function () { + it('all products are selected', function () { + const {container} = render(); + + // Steps + for (const step of steps()) { + expect( + screen.getByRole('heading', {name: step.title ?? StepTitle[step.type]}) + ).toBeInTheDocument(); + } + + expect(container).toSnapshot(); + }); +}); diff --git a/static/app/gettingStartedDocs/node/node.tsx b/static/app/gettingStartedDocs/node/node.tsx new file mode 100644 index 00000000000000..f55199837945ca --- /dev/null +++ b/static/app/gettingStartedDocs/node/node.tsx @@ -0,0 +1,95 @@ +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {getUploadSourceMapsStep} from 'sentry/components/onboarding/gettingStartedDoc/utils'; +import {t, tct} from 'sentry/locale'; + +const performanceOtherConfig = ` +// Performance Monitoring +tracesSampleRate: 1.0, // Capture 100% of the transactions, reduce in production!`; + +export const steps = ({ + sentryInitContent, +}: { + sentryInitContent?: string; +} = {}): LayoutProps['steps'] => [ + { + type: 
StepType.INSTALL, + description: t('Add the Sentry Node SDK as a dependency:'), + configurations: [ + { + language: 'bash', + code: ` +# Using yarn +yarn add @sentry/node + +# Using npm +npm install --save @sentry/node + `, + }, + ], + }, + { + type: StepType.CONFIGURE, + description: ( +

+ {tct( + "Initialize Sentry as early as possible in your application's lifecycle, for example in your [code:index.ts/js] entry point:", + {code: } + )} +

+ ), + configurations: [ + { + language: 'javascript', + code: ` + const Sentry = require("@sentry/node"); + // or use ESM import statements + // import * as Sentry from '@sentry/node'; + + Sentry.init({ + ${sentryInitContent} + }); + `, + }, + ], + }, + getUploadSourceMapsStep('https://docs.sentry.io/platforms/node/sourcemaps/'), + { + type: StepType.VERIFY, + description: t( + "This snippet contains an intentional error and can be used as a test to make sure that everything's working as expected." + ), + configurations: [ + { + language: 'javascript', + code: ` + const transaction = Sentry.startTransaction({ + op: "test", + name: "My First Test Transaction", + }); + + setTimeout(() => { + try { + foo(); + } catch (e) { + Sentry.captureException(e); + } finally { + transaction.finish(); + } + }, 99); + `, + }, + ], + }, +]; + +export function GettingStartedWithNode({dsn, ...props}: ModuleProps) { + const sentryInitContent: string[] = [`dsn: "${dsn}",`, performanceOtherConfig]; + + return ( + + ); +} + +export default GettingStartedWithNode; From 14d014a45183291e3966cb5123b5f756de825bd6 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 17 Jul 2023 09:33:56 -0400 Subject: [PATCH 03/67] ref: remove unused smtp patch (#52887) this patch doesn't apply to 3.8 anyway -- and 3.8 handles EHLO --- src/sentry/services/smtp.py | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/src/sentry/services/smtp.py b/src/sentry/services/smtp.py index c0bbf4fea413cf..05ad06008d0812 100644 --- a/src/sentry/services/smtp.py +++ b/src/sentry/services/smtp.py @@ -1,7 +1,7 @@ import asyncore import email import logging -from smtpd import SMTPChannel, SMTPServer +from smtpd import SMTPServer from email_reply_parser import EmailReplyParser @@ -11,23 +11,6 @@ logger = logging.getLogger(__name__) - -# HACK(mattrobenolt): literally no idea what I'm doing. Mostly made this up. 
-# SMTPChannel doesn't support EHLO response, but nginx requires an EHLO. -# EHLO is available in python 3, so this is backported somewhat -def smtp_EHLO(self, arg): - if not arg: - self.push("501 Syntax: EHLO hostname") - return - if self._SMTPChannel__greeting: - self.push("503 Duplicate HELO/EHLO") - else: - self._SMTPChannel__greeting = arg - self.push("250 %s" % self._SMTPChannel__fqdn) - - -SMTPChannel.smtp_EHLO = smtp_EHLO # type: ignore[method-assign] - STATUS = {200: "200 Ok", 550: "550 Not found", 552: "552 Message too long"} From 38cdc653a7c2a6de7c2f4af5c740ed562bcf759e Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Mon, 17 Jul 2023 15:47:43 +0200 Subject: [PATCH 04/67] ref(ts): Convert test newProject to typescript (#52963) --- fixtures/js-stubs/{ruleConditions.js => ruleConditions.ts} | 0 fixtures/js-stubs/types.tsx | 4 ++-- static/app/views/projectInstall/createProject.spec.tsx | 1 - .../{newProject.spec.jsx => newProject.spec.tsx} | 0 4 files changed, 2 insertions(+), 3 deletions(-) rename fixtures/js-stubs/{ruleConditions.js => ruleConditions.ts} (100%) rename static/app/views/projectInstall/{newProject.spec.jsx => newProject.spec.tsx} (100%) diff --git a/fixtures/js-stubs/ruleConditions.js b/fixtures/js-stubs/ruleConditions.ts similarity index 100% rename from fixtures/js-stubs/ruleConditions.js rename to fixtures/js-stubs/ruleConditions.ts diff --git a/fixtures/js-stubs/types.tsx b/fixtures/js-stubs/types.tsx index de74a87aa4f1c4..9743eeefebdcc6 100644 --- a/fixtures/js-stubs/types.tsx +++ b/fixtures/js-stubs/types.tsx @@ -6,6 +6,7 @@ import type { } from 'sentry/views/replays/types'; import type {Replay} from './replay'; +import {MOCK_RESP_VERBOSE} from './ruleConditions'; type SimpleStub = () => T; @@ -87,6 +88,7 @@ type TestStubFixtures = { InstallWizard: OverridableStub; JiraIntegration: OverridableStub; JiraIntegrationProvider: OverridableStub; + MOCK_RESP_VERBOSE: typeof MOCK_RESP_VERBOSE; Member: OverridableStub; Members: 
OverridableStubList; MetricRule: OverridableStub; @@ -202,8 +204,6 @@ type TestStubFixtures = { // AsanaAutocomplete(type = 'project', values = [DEFAULT_AUTOCOMPLETE]) // PhabricatorAutocomplete(type = 'project', values = null) // RoleList(params = [], fullAccess = false) - - // const MOCK_RESP_VERBOSE // const MOCK_RESP_ONLY_IGNORED_CONDITIONS_INVALID // const MOCK_RESP_INCONSISTENT_PLACEHOLDERS // const MOCK_RESP_INCONSISTENT_INTERVALS diff --git a/static/app/views/projectInstall/createProject.spec.tsx b/static/app/views/projectInstall/createProject.spec.tsx index a9bb5657c4859f..5e106e17887047 100644 --- a/static/app/views/projectInstall/createProject.spec.tsx +++ b/static/app/views/projectInstall/createProject.spec.tsx @@ -479,7 +479,6 @@ describe('CreateProject', function () { MockApiClient.addMockResponse({ url: `/projects/${organization.slug}/rule-conditions/`, - // @ts-expect-error TODO: fix this type body: TestStubs.MOCK_RESP_VERBOSE, }); }); diff --git a/static/app/views/projectInstall/newProject.spec.jsx b/static/app/views/projectInstall/newProject.spec.tsx similarity index 100% rename from static/app/views/projectInstall/newProject.spec.jsx rename to static/app/views/projectInstall/newProject.spec.tsx From 501d8cb0879f7c11d9ef29ceea3151566fa72788 Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Mon, 17 Jul 2023 15:48:35 +0200 Subject: [PATCH 05/67] ref(ts): Convert test organizationDetails to typescript (#52964) --- .../{index.spec.jsx => index.spec.tsx} | 22 ++++++++++--------- .../app/views/organizationDetails/index.tsx | 2 +- 2 files changed, 13 insertions(+), 11 deletions(-) rename static/app/views/organizationDetails/{index.spec.jsx => index.spec.tsx} (88%) diff --git a/static/app/views/organizationDetails/index.spec.jsx b/static/app/views/organizationDetails/index.spec.tsx similarity index 88% rename from static/app/views/organizationDetails/index.spec.jsx rename to static/app/views/organizationDetails/index.spec.tsx index 
9a936f12f4edad..8cb5414ee1094c 100644 --- a/static/app/views/organizationDetails/index.spec.jsx +++ b/static/app/views/organizationDetails/index.spec.tsx @@ -1,4 +1,5 @@ -import {act, render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; +import {initializeOrg} from 'sentry-test/initializeOrg'; +import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; import {pinFilter} from 'sentry/actionCreators/pageFilters'; import OrganizationStore from 'sentry/stores/organizationStore'; @@ -15,12 +16,14 @@ jest.mock( ); describe('OrganizationDetails', function () { + const {routerProps} = initializeOrg(); + let getTeamsMock; let getProjectsMock; beforeEach(function () { OrganizationStore.reset(); - act(() => ProjectsStore.reset()); + ProjectsStore.reset(); PageFiltersStore.reset(); MockApiClient.clearMockResponses(); @@ -52,9 +55,8 @@ describe('OrganizationDetails', function () { render(
@@ -79,7 +81,7 @@ describe('OrganizationDetails', function () { }); render( - +
); @@ -107,7 +109,7 @@ describe('OrganizationDetails', function () { }); render( - +
); @@ -136,7 +138,7 @@ describe('OrganizationDetails', function () { }); render( - +
); @@ -158,7 +160,7 @@ describe('OrganizationDetails', function () { MockApiClient.addMockResponse({url: '/organizations/other-org/projects/', body: []}); const {rerender} = render( - +
); @@ -169,7 +171,7 @@ describe('OrganizationDetails', function () { ); rerender( - +
); @@ -177,7 +179,7 @@ describe('OrganizationDetails', function () { expect(PageFiltersStore.getState().pinnedFilters).toEqual(new Set(['projects'])); rerender( - +
); diff --git a/static/app/views/organizationDetails/index.tsx b/static/app/views/organizationDetails/index.tsx index bd85e7ae966d89..ca565a4c854e5c 100644 --- a/static/app/views/organizationDetails/index.tsx +++ b/static/app/views/organizationDetails/index.tsx @@ -7,7 +7,7 @@ import OrganizationContextContainer from 'sentry/views/organizationContextContai import Body from './body'; -type Props = RouteComponentProps<{orgId: string}, {}> & +type Props = RouteComponentProps<{orgId?: string}, {}> & Partial>; function OrganizationDetails({children, ...props}: Props) { From d36bce3c3f9474c0ef943e586daf6c8790eded36 Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Mon, 17 Jul 2023 15:48:56 +0200 Subject: [PATCH 06/67] ref(ts): Convert test awsLambdaCloudformation to typescript (#52965) --- ...spec.jsx => awsLambdaCloudformation.spec.tsx} | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) rename static/app/views/integrationPipeline/{awsLambdaCloudformation.spec.jsx => awsLambdaCloudformation.spec.tsx} (74%) diff --git a/static/app/views/integrationPipeline/awsLambdaCloudformation.spec.jsx b/static/app/views/integrationPipeline/awsLambdaCloudformation.spec.tsx similarity index 74% rename from static/app/views/integrationPipeline/awsLambdaCloudformation.spec.jsx rename to static/app/views/integrationPipeline/awsLambdaCloudformation.spec.tsx index 5f4cf999eb8286..b93a09eb037ed7 100644 --- a/static/app/views/integrationPipeline/awsLambdaCloudformation.spec.jsx +++ b/static/app/views/integrationPipeline/awsLambdaCloudformation.spec.tsx @@ -1,7 +1,7 @@ import selectEvent from 'react-select-event'; import * as qs from 'query-string'; -import {fireEvent, render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; import AwsLambdaCloudformation from 'sentry/views/integrationPipeline/awsLambdaCloudformation'; @@ -17,6 +17,7 @@ describe('AwsLambdaCloudformation', () => { 
it('submit arn', async () => { render( { // Open configuration fields await userEvent.click(screen.getByRole('button', {name: "I've created the stack"})); - // XXX(epurkhiser): This form is pretty wonky with how it works, and - // probably needs cleaned up again in the future. I couldn't get - // await userEvent.type to work here because of something relating to the - // validation I think. - // Fill out fields - const accountNumber = screen.getByRole('textbox', {name: 'AWS Account Number'}); - fireEvent.change(accountNumber, {target: {value: '599817902985'}}); + await userEvent.type( + screen.getByRole('textbox', {name: 'AWS Account Number'}), + '599817902985' + ); await selectEvent.select(screen.getByRole('textbox', {name: 'AWS Region'}), [ - ['us-west-1'], + 'us-west-1', ]); expect(screen.getByRole('button', {name: 'Next'})).toBeEnabled(); From e719c103734b91dda017cb0a0c5e5c404fccfa8c Mon Sep 17 00:00:00 2001 From: Francesco Novy Date: Mon, 17 Jul 2023 16:10:59 +0200 Subject: [PATCH 07/67] feat(loader): Add new loader settings page (#52512) --------- Co-authored-by: Daniel Griesser Co-authored-by: Priscila Oliveira --- static/app/routes.tsx | 5 + .../settings/project/loaderScript.spec.tsx | 341 ++++++++++++++++++ .../views/settings/project/loaderScript.tsx | 140 +++++++ .../project/navigationConfiguration.tsx | 5 + .../projectKeys/details/loaderSettings.tsx | 10 +- 5 files changed, 496 insertions(+), 5 deletions(-) create mode 100644 static/app/views/settings/project/loaderScript.spec.tsx create mode 100644 static/app/views/settings/project/loaderScript.tsx diff --git a/static/app/routes.tsx b/static/app/routes.tsx index 9c6051074e159a..68c3c1d66643fd 100644 --- a/static/app/routes.tsx +++ b/static/app/routes.tsx @@ -588,6 +588,11 @@ function buildRoutes() { )} /> + import('sentry/views/settings/project/loaderScript'))} + /> ); + + await waitForElementToBeRemoved(() => screen.queryByTestId('loading-indicator')); + + 
expect(screen.getByTestId('loading-error')).toHaveTextContent( + 'Failed to load project keys.' + ); + }); + + it('renders empty', async function () { + const {organization, project} = initializeOrg(); + + mockApi({organization, project, projectKeys: []}); + + render(); + + await waitForElementToBeRemoved(() => screen.queryByTestId('loading-indicator')); + + expect( + screen.getByText('There are no keys active for this project.') + ).toBeInTheDocument(); + }); + + it('renders for single project', async function () { + const {organization, project} = initializeOrg(); + const projectKey = TestStubs.ProjectKeys()[0]; + const projectKeys = [projectKey]; + + mockApi({organization, project, projectKeys}); + + render(); + + await waitForElementToBeRemoved(() => screen.queryByTestId('loading-indicator')); + + // Loader Script is rendered + expect(screen.getByText(`Client Key: ${projectKey.name}`)).toBeInTheDocument(); + const loaderScript = screen.getByRole('textbox', { + name: 'Loader Script', + }) as HTMLInputElement; + const loaderScriptValue = loaderScript.value; + expect(loaderScriptValue).toEqual(expect.stringContaining(projectKeys[0].dsn.cdn)); + }); + + it('renders multiple keys', async function () { + const {organization, project} = initializeOrg(); + const projectKeys = TestStubs.ProjectKeys([ + { + dsn: { + secret: + 'http://188ee45a58094d939428d8585aa6f662:a33bf9aba64c4bbdaf873bb9023b6d2c@dev.getsentry.net:8000/1', + minidump: + 'http://dev.getsentry.net:8000/api/1/minidump?sentry_key=188ee45a58094d939428d8585aa6f662', + public: 'http://188ee45a58094d939428d8585aa6f662@dev.getsentry.net:8000/1', + csp: 'http://dev.getsentry.net:8000/api/1/csp-report/?sentry_key=188ee45a58094d939428d8585aa6f662', + security: + 'http://dev.getsentry.net:8000/api/1/security-report/?sentry_key=188ee45a58094d939428d8585aa6f662', + }, + public: '188ee45a58094d939428d8585aa6f662', + secret: 'a33bf9aba64c4bbdaf873bb9023b6d2c', + name: 'Key 2', + rateLimit: null, + projectId: 1, + 
dateCreated: '2018-02-28T07:13:51.087Z', + id: '188ee45a58094d939428d8585aa6f662', + isActive: true, + label: 'Key 2', + browserSdkVersion: 'latest', + browserSdk: { + choices: [ + ['latest', 'latest'], + ['7.x', '7.x'], + ['6.x', '6.x'], + ['5.x', '5.x'], + ['4.x', '4.x'], + ], + }, + dynamicSdkLoaderOptions: { + hasPerformance: false, + hasReplay: false, + hasDebug: false, + }, + }, + ]); + + mockApi({organization, project, projectKeys}); + + render(); + + await waitForElementToBeRemoved(() => screen.queryByTestId('loading-indicator')); + + expect(screen.getByText(`Client Key: ${projectKeys[0].name}`)).toBeInTheDocument(); + expect(screen.getByText(`Client Key: ${projectKeys[1].name}`)).toBeInTheDocument(); + + const allLoaderScripts = screen.getAllByRole('textbox', { + name: 'Loader Script', + }) as HTMLInputElement[]; + + expect(allLoaderScripts).toHaveLength(2); + }); + + it('allows to update key settings', async function () { + const {organization, project} = initializeOrg(); + const baseKey = TestStubs.ProjectKeys()[0]; + const projectKey = { + ...baseKey, + dynamicSdkLoaderOptions: { + ...baseKey.dynamicSdkLoaderOptions, + hasReplay: true, + }, + }; + + mockApi({organization, project, projectKeys: [projectKey]}); + + const mockPut = MockApiClient.addMockResponse({ + url: `/projects/${organization.slug}/${project.slug}/keys/${projectKey.id}/`, + method: 'PUT', + body: { + ...projectKey, + dynamicSdkLoaderOptions: { + ...projectKey.dynamicSdkLoaderOptions, + hasPerformance: true, + }, + }, + }); + + render(); + + await waitForElementToBeRemoved(() => screen.queryByTestId('loading-indicator')); + + expect(screen.getByText(t('Enable Performance Monitoring'))).toBeInTheDocument(); + expect(screen.getByText(t('Enable Session Replay'))).toBeInTheDocument(); + expect(screen.getByText(t('Enable Debug Bundles & Logging'))).toBeInTheDocument(); + + let performanceCheckbox = screen.getByRole('checkbox', { + name: t('Enable Performance Monitoring'), + }); + 
expect(performanceCheckbox).toBeEnabled(); + expect(performanceCheckbox).not.toBeChecked(); + + const replayCheckbox = screen.getByRole('checkbox', { + name: t('Enable Session Replay'), + }); + expect(replayCheckbox).toBeEnabled(); + expect(replayCheckbox).toBeChecked(); + + const debugCheckbox = screen.getByRole('checkbox', { + name: t('Enable Debug Bundles & Logging'), + }); + expect(debugCheckbox).toBeEnabled(); + expect(debugCheckbox).not.toBeChecked(); + + // Toggle performance option + await userEvent.click( + screen.getByRole('checkbox', { + name: t('Enable Performance Monitoring'), + }) + ); + + performanceCheckbox = await screen.findByRole('checkbox', { + name: t('Enable Performance Monitoring'), + checked: true, + }); + expect(performanceCheckbox).toBeEnabled(); + expect(performanceCheckbox).toBeChecked(); + + expect(mockPut).toHaveBeenCalledWith( + `/projects/${organization.slug}/${project.slug}/keys/${projectKey.id}/`, + expect.objectContaining({ + data: expect.objectContaining({ + dynamicSdkLoaderOptions: { + ...projectKey.dynamicSdkLoaderOptions, + hasPerformance: true, + }, + }), + }) + ); + }); + + it('allows to update one of multiple keys', async function () { + const {organization, project} = initializeOrg(); + const projectKeys = TestStubs.ProjectKeys([ + { + dsn: { + secret: + 'http://188ee45a58094d939428d8585aa6f662:a33bf9aba64c4bbdaf873bb9023b6d2c@dev.getsentry.net:8000/1', + minidump: + 'http://dev.getsentry.net:8000/api/1/minidump?sentry_key=188ee45a58094d939428d8585aa6f662', + public: 'http://188ee45a58094d939428d8585aa6f662@dev.getsentry.net:8000/1', + csp: 'http://dev.getsentry.net:8000/api/1/csp-report/?sentry_key=188ee45a58094d939428d8585aa6f662', + security: + 'http://dev.getsentry.net:8000/api/1/security-report/?sentry_key=188ee45a58094d939428d8585aa6f662', + }, + public: '188ee45a58094d939428d8585aa6f662', + secret: 'a33bf9aba64c4bbdaf873bb9023b6d2c', + name: 'Key 2', + rateLimit: null, + projectId: 1, + dateCreated: 
'2018-02-28T07:13:51.087Z', + id: '188ee45a58094d939428d8585aa6f662', + isActive: true, + label: 'Key 2', + browserSdkVersion: 'latest', + browserSdk: { + choices: [ + ['latest', 'latest'], + ['7.x', '7.x'], + ['6.x', '6.x'], + ['5.x', '5.x'], + ['4.x', '4.x'], + ], + }, + dynamicSdkLoaderOptions: { + hasPerformance: false, + hasReplay: false, + hasDebug: false, + }, + }, + ]); + const projectKey = projectKeys[1]; + + mockApi({organization, project, projectKeys}); + const mockPut = MockApiClient.addMockResponse({ + url: `/projects/${organization.slug}/${project.slug}/keys/${projectKey.id}/`, + method: 'PUT', + body: { + ...projectKey, + dynamicSdkLoaderOptions: { + ...projectKey.dynamicSdkLoaderOptions, + hasPerformance: true, + }, + }, + }); + + render(); + + await waitForElementToBeRemoved(() => screen.queryByTestId('loading-indicator')); + + expect( + screen.getAllByRole('checkbox', { + name: t('Enable Performance Monitoring'), + checked: false, + }) + ).toHaveLength(2); + expect( + screen.getAllByRole('checkbox', { + name: t('Enable Session Replay'), + checked: false, + }) + ).toHaveLength(2); + expect( + screen.getAllByRole('checkbox', { + name: t('Enable Debug Bundles & Logging'), + checked: false, + }) + ).toHaveLength(2); + + // Toggle performance option + await userEvent.click( + screen.getAllByRole('checkbox', { + name: t('Enable Performance Monitoring'), + })[1] + ); + + expect( + await screen.findByRole('checkbox', { + name: t('Enable Performance Monitoring'), + checked: true, + }) + ).toBeInTheDocument(); + + expect( + screen.getByRole('checkbox', { + name: t('Enable Performance Monitoring'), + checked: false, + }) + ).toBeInTheDocument(); + expect( + screen.getAllByRole('checkbox', { + name: t('Enable Session Replay'), + checked: false, + }) + ).toHaveLength(2); + expect( + screen.getAllByRole('checkbox', { + name: t('Enable Debug Bundles & Logging'), + checked: false, + }) + ).toHaveLength(2); + + expect(mockPut).toHaveBeenCalledWith( + 
`/projects/${organization.slug}/${project.slug}/keys/${projectKey.id}/`, + expect.objectContaining({ + data: expect.objectContaining({ + dynamicSdkLoaderOptions: { + ...projectKey.dynamicSdkLoaderOptions, + hasPerformance: true, + }, + }), + }) + ); + }); +}); diff --git a/static/app/views/settings/project/loaderScript.tsx b/static/app/views/settings/project/loaderScript.tsx new file mode 100644 index 00000000000000..36e1591006490c --- /dev/null +++ b/static/app/views/settings/project/loaderScript.tsx @@ -0,0 +1,140 @@ +import {Fragment, useCallback, useState} from 'react'; + +import {LinkButton} from 'sentry/components/button'; +import EmptyMessage from 'sentry/components/emptyMessage'; +import ExternalLink from 'sentry/components/links/externalLink'; +import Link from 'sentry/components/links/link'; +import LoadingError from 'sentry/components/loadingError'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; +import Panel from 'sentry/components/panels/panel'; +import PanelAlert from 'sentry/components/panels/panelAlert'; +import PanelBody from 'sentry/components/panels/panelBody'; +import PanelHeader from 'sentry/components/panels/panelHeader'; +import {t, tct} from 'sentry/locale'; +import {Organization, Project} from 'sentry/types'; +import {useApiQuery} from 'sentry/utils/queryClient'; +import useOrganization from 'sentry/utils/useOrganization'; +import SettingsPageHeader from 'sentry/views/settings/components/settingsPageHeader'; +import TextBlock from 'sentry/views/settings/components/text/textBlock'; +import {LoaderSettings} from 'sentry/views/settings/project/projectKeys/details/loaderSettings'; +import {ProjectKey} from 'sentry/views/settings/project/projectKeys/types'; + +export function ProjectLoaderScript({project}: {project: Project}) { + const organization = useOrganization(); + const apiEndpoint = `/projects/${organization.slug}/${project.slug}/keys/`; + const [updatedProjectKeys, setUpdatedProjectKeys] = useState([]); + + const { 
+ data: projectKeys, + isLoading, + error, + refetch: refetchProjectKeys, + } = useApiQuery([apiEndpoint], { + staleTime: 0, + }); + + const handleUpdateProjectKey = useCallback( + (projectKey: ProjectKey) => { + const existingProjectIndex = updatedProjectKeys.findIndex( + key => key.id === projectKey.id + ); + const newUpdatedProjectKeys = + existingProjectIndex > -1 + ? [...updatedProjectKeys].map((updatedProjectKey, index) => { + return index === existingProjectIndex ? projectKey : updatedProjectKey; + }) + : [...updatedProjectKeys, projectKey]; + + setUpdatedProjectKeys(newUpdatedProjectKeys); + }, + [updatedProjectKeys] + ); + + return ( + + + + + {tct( + 'The Loader Script is the easiest way to initialize the Sentry SDK. The Loader Script automatically keeps your Sentry SDK up to date and offers configuration for different Sentry features. [docsLink:Learn more about the Loader Script]. Note: The Loader Script is bound to a Client Key (DSN), to create a new Script, go to the [clientKeysLink:Client Keys page].', + { + docsLink: ( + + ), + clientKeysLink: ( + + ), + } + )} + + + {isLoading && } + {!!error && ( + + )} + {!isLoading && !error && !projectKeys?.length && ( + + )} + + {projectKeys?.map(key => { + const actualKey = + updatedProjectKeys.find(updatedKey => updatedKey.id === key.id) ?? 
key; + return ( + + ); + })} + + ); +} + +function LoaderItem({ + organization, + project, + projectKey, + onUpdateProjectKey, +}: { + onUpdateProjectKey: (projectKey: ProjectKey) => void; + organization: Organization; + project: Project; + projectKey: ProjectKey; +}) { + return ( + + + {tct('Client Key: [name]', {name: projectKey.name})} + + + {t('View Key Details')} + + + + + {t('Note that it can take a few minutes until changed options are live.')} + + + + + + ); +} + +export default ProjectLoaderScript; diff --git a/static/app/views/settings/project/navigationConfiguration.tsx b/static/app/views/settings/project/navigationConfiguration.tsx index f87220c05837b6..265194f0146a8c 100644 --- a/static/app/views/settings/project/navigationConfiguration.tsx +++ b/static/app/views/settings/project/navigationConfiguration.tsx @@ -122,6 +122,11 @@ export default function getConfiguration({ title: t('Client Keys (DSN)'), description: t("View and manage the project's client keys (DSN)"), }, + { + path: `${pathPrefix}/loader-script/`, + title: t('Loader Script'), + description: t("View and manage the project's Loader Script"), + }, { path: `${pathPrefix}/release-tracking/`, title: t('Releases'), diff --git a/static/app/views/settings/project/projectKeys/details/loaderSettings.tsx b/static/app/views/settings/project/projectKeys/details/loaderSettings.tsx index 079ad71cf7bbc7..5cebf059be799b 100644 --- a/static/app/views/settings/project/projectKeys/details/loaderSettings.tsx +++ b/static/app/views/settings/project/projectKeys/details/loaderSettings.tsx @@ -141,13 +141,13 @@ export function LoaderSettings({keyId, orgSlug, project, data, updateData}: Prop inline={false} flexibleControlStateSize > - + {``} { updateLoaderOption({hasDebug: value}); From f89ccc939db0370b177fdeb0a71492f516607ff6 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:32:57 -0400 Subject: [PATCH 08/67] ref(typescript): Convert a few test 
stubs (#52921) - TestStubs.Span - TestStubs.Tags - TestStubs.Subscriptions - TestStubs.TagValues - TestStubs.TeamAlertsTriggered - TestStubs.IssuesBreakdown - TestStubs.TeamAlertsTriggered - TestStubs.Tombstones --- fixtures/js-stubs/{span.js => span.ts} | 5 +- .../{subscriptions.js => subscriptions.ts} | 4 +- fixtures/js-stubs/{tags.js => tags.ts} | 16 ++++- .../js-stubs/{tagvalues.js => tagvalues.ts} | 10 ++-- ...rtsTriggered.js => teamAlertsTriggered.ts} | 4 +- ...uesBreakdown.js => teamIssuesBreakdown.ts} | 32 +--------- fixtures/js-stubs/tombstones.js | 60 ------------------- fixtures/js-stubs/tombstones.ts | 30 ++++++++++ .../teamInsights/teamAlertsTriggered.tsx | 2 +- .../teamInsights/teamIssuesBreakdown.tsx | 2 +- .../settings/account/accountSubscriptions.tsx | 2 +- 11 files changed, 65 insertions(+), 102 deletions(-) rename fixtures/js-stubs/{span.js => span.ts} (62%) rename fixtures/js-stubs/{subscriptions.js => subscriptions.ts} (81%) rename fixtures/js-stubs/{tags.js => tags.ts} (85%) rename fixtures/js-stubs/{tagvalues.js => tagvalues.ts} (70%) rename fixtures/js-stubs/{teamAlertsTriggered.js => teamAlertsTriggered.ts} (91%) rename fixtures/js-stubs/{teamIssuesBreakdown.js => teamIssuesBreakdown.ts} (89%) delete mode 100644 fixtures/js-stubs/tombstones.js create mode 100644 fixtures/js-stubs/tombstones.ts diff --git a/fixtures/js-stubs/span.js b/fixtures/js-stubs/span.ts similarity index 62% rename from fixtures/js-stubs/span.js rename to fixtures/js-stubs/span.ts index 352fc31039fae5..3f23f2bffd7293 100644 --- a/fixtures/js-stubs/span.js +++ b/fixtures/js-stubs/span.ts @@ -1,4 +1,6 @@ -export function Span(params = {}) { +import type {RawSpanType} from 'sentry/components/events/interfaces/spans/types'; + +export function Span(params = {}): RawSpanType { return { timestamp: 1657201239.51, start_timestamp: 1657201239.503, @@ -6,6 +8,7 @@ export function Span(params = {}) { span_id: 'a385d9fd52e0c4bc', parent_span_id: 'bdf1a9fae2062311', trace_id: 
'4d5c2e2102234a7d94102b4f1e41c2bb', + data: {}, ...params, }; } diff --git a/fixtures/js-stubs/subscriptions.js b/fixtures/js-stubs/subscriptions.ts similarity index 81% rename from fixtures/js-stubs/subscriptions.js rename to fixtures/js-stubs/subscriptions.ts index 6540605bfdd72c..c453f7821f2f42 100644 --- a/fixtures/js-stubs/subscriptions.js +++ b/fixtures/js-stubs/subscriptions.ts @@ -1,4 +1,6 @@ -export function Subscriptions(params = []) { +import type {Subscription} from 'sentry/views/settings/account/accountSubscriptions'; + +export function Subscriptions(params = []): Subscription[] { return [ { subscribedDate: '2018-01-08T05:14:59.102Z', diff --git a/fixtures/js-stubs/tags.js b/fixtures/js-stubs/tags.ts similarity index 85% rename from fixtures/js-stubs/tags.js rename to fixtures/js-stubs/tags.ts index b0b512486acc82..da40b2cbf31c56 100644 --- a/fixtures/js-stubs/tags.js +++ b/fixtures/js-stubs/tags.ts @@ -1,4 +1,6 @@ -export function Tags(params = []) { +import type {TagWithTopValues} from 'sentry/types'; + +export function Tags(params = []): TagWithTopValues[] { return [ { topValues: [ @@ -60,11 +62,21 @@ export function Tags(params = []) { canDelete: true, }, { - topValues: [{name: 'prod', value: 'prod', key: 'environment', count: 100}], + topValues: [ + { + name: 'prod', + value: 'prod', + key: 'environment', + count: 100, + lastSeen: '2018-12-20T23:32:25Z', + firstSeen: '2018-05-06T03:48:28.825Z', + }, + ], key: 'environment', name: 'Environment', canDelete: false, totalValues: 100, + uniqueValues: 1, }, { topValues: [ diff --git a/fixtures/js-stubs/tagvalues.js b/fixtures/js-stubs/tagvalues.ts similarity index 70% rename from fixtures/js-stubs/tagvalues.js rename to fixtures/js-stubs/tagvalues.ts index 727ce15c511ad8..05355821df9b5b 100644 --- a/fixtures/js-stubs/tagvalues.js +++ b/fixtures/js-stubs/tagvalues.ts @@ -1,9 +1,9 @@ -export function TagValues(params = []) { +import type {TagValue} from 'sentry/types'; + +export function TagValues(params 
= []): TagValue[] { return [ { username: 'david', - hash: '172522ec1028ab781d9dfd17eaca4427', - dateCreated: '2018-10-03T03:39:51.223Z', lastSeen: '2018-12-20T23:32:25Z', query: 'user.username:david', id: '10799', @@ -13,10 +13,10 @@ export function TagValues(params = []) { avatarUrl: 'https://secure.gravatar.com/avatar/d66694bbc7619203377bd9c9b7b9f14a?s=32&d=mm', value: 'username:david', - tagValue: 'username:david', - identifier: null, + identifier: undefined, ipAddress: '128.126.232.84', email: 'david@example.com', + ip_address: '0.0.0.0', }, ...params, ]; diff --git a/fixtures/js-stubs/teamAlertsTriggered.js b/fixtures/js-stubs/teamAlertsTriggered.ts similarity index 91% rename from fixtures/js-stubs/teamAlertsTriggered.js rename to fixtures/js-stubs/teamAlertsTriggered.ts index b55576ea7a520a..aba4626e7dd5b1 100644 --- a/fixtures/js-stubs/teamAlertsTriggered.js +++ b/fixtures/js-stubs/teamAlertsTriggered.ts @@ -1,4 +1,6 @@ -export function TeamAlertsTriggered() { +import type {AlertsTriggered} from 'sentry/views/organizationStats/teamInsights/teamAlertsTriggered'; + +export function TeamAlertsTriggered(): AlertsTriggered { return { '2021-08-20T00:00:00Z': 3, '2021-08-21T00:00:00Z': 2, diff --git a/fixtures/js-stubs/teamIssuesBreakdown.js b/fixtures/js-stubs/teamIssuesBreakdown.ts similarity index 89% rename from fixtures/js-stubs/teamIssuesBreakdown.js rename to fixtures/js-stubs/teamIssuesBreakdown.ts index 3f1ba905e5435e..89378239435378 100644 --- a/fixtures/js-stubs/teamIssuesBreakdown.js +++ b/fixtures/js-stubs/teamIssuesBreakdown.ts @@ -1,8 +1,9 @@ -export function TeamIssuesBreakdown() { +import type {IssuesBreakdown} from 'sentry/views/organizationStats/teamInsights/teamIssuesBreakdown'; + +export function TeamIssuesBreakdown(): IssuesBreakdown { return { 2: { '2021-11-19T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -12,7 +13,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-11-20T00:00:00+00:00': { - reviewed: 
0, deleted: 0, ignored: 0, resolved: 0, @@ -22,7 +22,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-11-21T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -32,7 +31,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-11-22T00:00:00+00:00': { - reviewed: 11, deleted: 11, ignored: 11, resolved: 11, @@ -42,7 +40,6 @@ export function TeamIssuesBreakdown() { total: 11, }, '2021-11-23T00:00:00+00:00': { - reviewed: 7, deleted: 7, ignored: 7, resolved: 7, @@ -52,7 +49,6 @@ export function TeamIssuesBreakdown() { total: 20, }, '2021-11-24T00:00:00+00:00': { - reviewed: 4, deleted: 4, ignored: 4, resolved: 4, @@ -62,7 +58,6 @@ export function TeamIssuesBreakdown() { total: 10, }, '2021-11-25T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -72,7 +67,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-11-26T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -82,7 +76,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-11-27T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -92,7 +85,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-11-28T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -102,7 +94,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-11-29T00:00:00+00:00': { - reviewed: 8, deleted: 8, ignored: 8, resolved: 8, @@ -112,7 +103,6 @@ export function TeamIssuesBreakdown() { total: 8, }, '2021-11-30T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -122,7 +112,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-01T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -132,7 +121,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-02T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -142,7 +130,6 @@ export function TeamIssuesBreakdown() { total: 0, }, 
'2021-12-03T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -152,7 +139,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-04T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -162,7 +148,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-05T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -172,7 +157,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-06T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -182,7 +166,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-07T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -192,7 +175,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-08T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -202,7 +184,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-09T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -212,7 +193,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-10T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -222,7 +202,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-11T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -232,7 +211,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-12T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -242,7 +220,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-13T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -252,7 +229,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-14T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -262,7 +238,6 @@ export function TeamIssuesBreakdown() { total: 0, }, '2021-12-15T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, @@ -272,7 +247,6 @@ export function 
TeamIssuesBreakdown() { total: 0, }, '2021-12-16T00:00:00+00:00': { - reviewed: 0, deleted: 0, ignored: 0, resolved: 0, diff --git a/fixtures/js-stubs/tombstones.js b/fixtures/js-stubs/tombstones.js deleted file mode 100644 index d8e3204fb070a0..00000000000000 --- a/fixtures/js-stubs/tombstones.js +++ /dev/null @@ -1,60 +0,0 @@ -export function Tombstones(params = []) { - return [ - { - culprit: 'poll(../../sentry/scripts/views.js)', - level: 'error', - actor: { - username: 'billy@sentry.io', - emails: [ - {is_verified: false, id: '28', email: 'test@test.com'}, - {is_verified: false, id: '17', email: 'billy36@sentry.io'}, - {is_verified: false, id: '11', email: 'awerawer@awe.com'}, - {is_verified: true, id: '10', email: 'billy2@sentry.io'}, - {is_verified: true, id: '5', email: 'billy@sentry.io'}, - ], - isManaged: false, - lastActive: '2018-02-21T01:27:52.255Z', - identities: [ - { - name: '79684', - dateVerified: '2018-02-21T00:52:40.149Z', - provider: {id: 'github', name: 'GitHub'}, - dateSynced: '2018-02-21T00:52:40.149Z', - organization: {slug: 'default', name: 'default'}, - id: '1', - }, - ], - id: '1', - isActive: true, - has2fa: true, - name: 'billy vong', - avatarUrl: - 'https://secure.gravatar.com/avatar/7b544e8eb9d08ed777be5aa82121155a?s=32&d=mm', - dateJoined: '2018-01-10T00:19:59Z', - options: { - timezone: 'America/Los_Angeles', - seenReleaseBroadcast: true, - stacktraceOrder: -1, - language: 'en', - clock24Hours: false, - }, - avatar: { - avatarUuid: '483ed7478a2248d59211f538c2997e0b', - avatarType: 'letter_avatar', - }, - lastLogin: '2018-02-14T07:09:37.536Z', - permissions: [], - email: 'billy@sentry.io', - }, - message: - "This is an example JavaScript exception TypeError Object [object Object] has no method 'updateFrom' poll(../../sentry/scripts/views.js)", - type: 'error', - id: '1', - metadata: { - type: 'TypeError', - value: "Object [object Object] has no method 'updateFrom'", - }, - }, - ...params, - ]; -} diff --git 
a/fixtures/js-stubs/tombstones.ts b/fixtures/js-stubs/tombstones.ts new file mode 100644 index 00000000000000..693b53690413e3 --- /dev/null +++ b/fixtures/js-stubs/tombstones.ts @@ -0,0 +1,30 @@ +import {EventOrGroupType, GroupTombstone} from 'sentry/types'; + +export function Tombstones(params = []): GroupTombstone[] { + return [ + { + culprit: 'poll(../../sentry/scripts/views.js)', + level: 'error', + actor: { + username: 'billy@sentry.io', + id: '1', + name: 'billy vong', + avatarUrl: + 'https://secure.gravatar.com/avatar/7b544e8eb9d08ed777be5aa82121155a?s=32&d=mm', + avatar: { + avatarUuid: '483ed7478a2248d59211f538c2997e0b', + avatarType: 'letter_avatar', + }, + email: 'billy@sentry.io', + ip_address: '0.0.0.0', + }, + type: EventOrGroupType.ERROR, + id: '1', + metadata: { + type: 'TypeError', + value: "Object [object Object] has no method 'updateFrom'", + }, + }, + ...params, + ]; +} diff --git a/static/app/views/organizationStats/teamInsights/teamAlertsTriggered.tsx b/static/app/views/organizationStats/teamInsights/teamAlertsTriggered.tsx index 396ac444075d0b..73a301842f1801 100644 --- a/static/app/views/organizationStats/teamInsights/teamAlertsTriggered.tsx +++ b/static/app/views/organizationStats/teamInsights/teamAlertsTriggered.tsx @@ -23,7 +23,7 @@ import {MetricRule} from 'sentry/views/alerts/rules/metric/types'; import {ProjectBadge, ProjectBadgeContainer} from './styles'; import {barAxisLabel, convertDayValueObjectToSeries, sortSeriesByDay} from './utils'; -type AlertsTriggered = Record; +export type AlertsTriggered = Record; type AlertsTriggeredRule = MetricRule & { totalThisWeek: number; diff --git a/static/app/views/organizationStats/teamInsights/teamIssuesBreakdown.tsx b/static/app/views/organizationStats/teamInsights/teamIssuesBreakdown.tsx index f8e6a16b35b3ff..e2571375cc0f2d 100644 --- a/static/app/views/organizationStats/teamInsights/teamIssuesBreakdown.tsx +++ b/static/app/views/organizationStats/teamInsights/teamIssuesBreakdown.tsx @@ -30,7 
+30,7 @@ interface StatusCounts { unignored?: number; } -type IssuesBreakdown = Record>; +export type IssuesBreakdown = Record>; type Statuses = keyof Omit; diff --git a/static/app/views/settings/account/accountSubscriptions.tsx b/static/app/views/settings/account/accountSubscriptions.tsx index f618a4d111a3b5..3909a7e867cbf2 100644 --- a/static/app/views/settings/account/accountSubscriptions.tsx +++ b/static/app/views/settings/account/accountSubscriptions.tsx @@ -21,7 +21,7 @@ import TextBlock from 'sentry/views/settings/components/text/textBlock'; const ENDPOINT = '/users/me/subscriptions/'; -type Subscription = { +export type Subscription = { email: string; listDescription: string; listId: number; From 942e2781b30ffc7539d0666f7484779ff9b5ce7a Mon Sep 17 00:00:00 2001 From: Abdkhan14 <60121741+Abdkhan14@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:35:00 -0400 Subject: [PATCH 09/67] =?UTF-8?q?feat(perf-detector-threshold-configuratio?= =?UTF-8?q?n)=20Backend=20changes=20to=20add=20pr=E2=80=A6=20(#52892)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Added changes for 3 improvements: - Can't update settings with invalid options. Gets a 400. - Can't cURL to change thresholds for an issue that has detection disabled for the project. - Can't reset thresholds for an issue that has detection disabled for the project. - Added tests. 
Co-authored-by: Abdullah Khan --- .../project_performance_issue_settings.py | 121 ++++++++++++++---- src/sentry/audit_log/events.py | 2 +- ...test_project_performance_issue_settings.py | 118 ++++++++++++++--- 3 files changed, 197 insertions(+), 44 deletions(-) diff --git a/src/sentry/api/endpoints/project_performance_issue_settings.py b/src/sentry/api/endpoints/project_performance_issue_settings.py index 1986b71fe7e8b5..135ebf4a72520a 100644 --- a/src/sentry/api/endpoints/project_performance_issue_settings.py +++ b/src/sentry/api/endpoints/project_performance_issue_settings.py @@ -1,3 +1,4 @@ +from enum import Enum from typing import Dict, Type from rest_framework import serializers, status @@ -29,20 +30,58 @@ TEN_MB = 10000000 # ten MB in bytes SETTINGS_PROJECT_OPTION_KEY = "sentry:performance_issue_settings" + # These options should only be accessible internally and used by # support to enable/disable performance issue detection for an outlying project # on a case-by-case basis. -map_internal_only_project_settings_to_group: Dict[str, Type[GroupType]] = { - "uncompressed_assets_detection_enabled": PerformanceUncompressedAssetsGroupType, - "consecutive_http_spans_detection_enabled": PerformanceConsecutiveHTTPQueriesGroupType, - "large_http_payload_detection_enabled": PerformanceLargeHTTPPayloadGroupType, - "n_plus_one_db_queries_detection_enabled": PerformanceNPlusOneGroupType, - "n_plus_one_api_calls_detection_enabled": PerformanceNPlusOneAPICallsGroupType, - "db_on_main_thread_detection_enabled": PerformanceDBMainThreadGroupType, - "file_io_on_main_thread_detection_enabled": PerformanceFileIOMainThreadGroupType, - "consecutive_db_queries_detection_enabled": PerformanceConsecutiveDBQueriesGroupType, - "large_render_blocking_asset_detection_enabled": PerformanceRenderBlockingAssetSpanGroupType, - "slow_db_queries_detection_enabled": PerformanceSlowDBQueryGroupType, +class InternalProjectOptions(Enum): + N_PLUS_ONE_DB = "n_plus_one_db_queries_detection_enabled" + 
UNCOMPRESSED_ASSET = "uncompressed_assets_detection_enabled" + CONSECUTIVE_HTTP_SPANS = "consecutive_http_spans_detection_enabled" + LARGE_HTTP_PAYLOAD = "large_http_payload_detection_enabled" + N_PLUS_ONE_API_CALLS = "n_plus_one_api_calls_detection_enabled" + DB_ON_MAIN_THREAD = "db_on_main_thread_detection_enabled" + FILE_IO_MAIN_THREAD = "file_io_on_main_thread_detection_enabled" + CONSECUTIVE_DB_QUERIES = "consecutive_db_queries_detection_enabled" + RENDER_BLOCKING_ASSET = "large_render_blocking_asset_detection_enabled" + SLOW_DB_QUERY = "slow_db_queries_detection_enabled" + + +class ConfigurableThresholds(Enum): + N_PLUS_ONE_DB_DURATION = "n_plus_one_db_duration_threshold" + UNCOMPRESSED_ASSET_DURATION = "uncompressed_asset_duration_threshold" + UNCOMPRESSED_ASSET_SIZE = "uncompressed_asset_size_threshold" + LARGE_HTTP_PAYLOAD_SIZE = "large_http_payload_size_threshold" + DB_ON_MAIN_THREAD_DURATION = "db_on_main_thread_duration_threshold" + FILE_IO_MAIN_THREAD_DURATION = "file_io_on_main_thread_duration_threshold" + CONSECUTIVE_DB_QUERIES_MIN_TIME_SAVED = "consecutive_db_min_time_saved_threshold" + RENDER_BLOCKING_ASSET_FCP_RATIO = "render_blocking_fcp_ratio" + SLOW_DB_QUERY_DURATION = "slow_db_query_duration_threshold" + + +internal_only_project_settings_to_group_map: Dict[str, Type[GroupType]] = { + InternalProjectOptions.UNCOMPRESSED_ASSET.value: PerformanceUncompressedAssetsGroupType, + InternalProjectOptions.CONSECUTIVE_HTTP_SPANS.value: PerformanceConsecutiveHTTPQueriesGroupType, + InternalProjectOptions.LARGE_HTTP_PAYLOAD.value: PerformanceLargeHTTPPayloadGroupType, + InternalProjectOptions.N_PLUS_ONE_DB.value: PerformanceNPlusOneGroupType, + InternalProjectOptions.N_PLUS_ONE_API_CALLS.value: PerformanceNPlusOneAPICallsGroupType, + InternalProjectOptions.DB_ON_MAIN_THREAD.value: PerformanceDBMainThreadGroupType, + InternalProjectOptions.FILE_IO_MAIN_THREAD.value: PerformanceFileIOMainThreadGroupType, + InternalProjectOptions.CONSECUTIVE_DB_QUERIES.value: 
PerformanceConsecutiveDBQueriesGroupType, + InternalProjectOptions.RENDER_BLOCKING_ASSET.value: PerformanceRenderBlockingAssetSpanGroupType, + InternalProjectOptions.SLOW_DB_QUERY.value: PerformanceSlowDBQueryGroupType, +} + +configurable_thresholds_to_internal_settings_map: Dict[str, str] = { + ConfigurableThresholds.N_PLUS_ONE_DB_DURATION.value: InternalProjectOptions.N_PLUS_ONE_DB.value, + ConfigurableThresholds.UNCOMPRESSED_ASSET_DURATION.value: InternalProjectOptions.UNCOMPRESSED_ASSET.value, + ConfigurableThresholds.UNCOMPRESSED_ASSET_SIZE.value: InternalProjectOptions.UNCOMPRESSED_ASSET.value, + ConfigurableThresholds.LARGE_HTTP_PAYLOAD_SIZE.value: InternalProjectOptions.LARGE_HTTP_PAYLOAD.value, + ConfigurableThresholds.DB_ON_MAIN_THREAD_DURATION.value: InternalProjectOptions.DB_ON_MAIN_THREAD.value, + ConfigurableThresholds.FILE_IO_MAIN_THREAD_DURATION.value: InternalProjectOptions.FILE_IO_MAIN_THREAD.value, + ConfigurableThresholds.CONSECUTIVE_DB_QUERIES_MIN_TIME_SAVED.value: InternalProjectOptions.CONSECUTIVE_DB_QUERIES.value, + ConfigurableThresholds.RENDER_BLOCKING_ASSET_FCP_RATIO.value: InternalProjectOptions.RENDER_BLOCKING_ASSET.value, + ConfigurableThresholds.SLOW_DB_QUERY_DURATION.value: InternalProjectOptions.SLOW_DB_QUERY.value, } @@ -94,6 +133,20 @@ class ProjectPerformanceIssueSettingsSerializer(serializers.Serializer): http_overhead_detection_enabled = serializers.BooleanField(required=False) +def get_disabled_threshold_options(payload, current_settings): + options = [] + internal_only_settings = [setting.value for setting in InternalProjectOptions] + for option in payload: + if option not in internal_only_settings: + internal_setting_for_option = configurable_thresholds_to_internal_settings_map.get( + option + ) + is_threshold_enabled = current_settings.get(internal_setting_for_option) + if not is_threshold_enabled: + options.append(option) + return options + + @region_silo_endpoint class 
ProjectPerformanceIssueSettingsEndpoint(ProjectEndpoint): permission_classes = (ProjectOwnerOrSuperUserPermissions,) @@ -125,12 +178,20 @@ def put(self, request: Request, project) -> Response: if not self.has_feature(project, request): return self.respond(status=status.HTTP_404_NOT_FOUND) - body_has_admin_options = any( - [ - option in request.data - for option in map_internal_only_project_settings_to_group.keys() - ] + internal_only_settings = [setting.value for setting in InternalProjectOptions] + threshold_settings = [setting.value for setting in ConfigurableThresholds] + allowed_settings_options = [*internal_only_settings, *threshold_settings] + + body_has_invalid_options = not request.data or any( + [option not in allowed_settings_options for option in request.data] ) + if body_has_invalid_options: + return Response( + {"detail": "Invalid settings option"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + body_has_admin_options = any([option in request.data for option in internal_only_settings]) if body_has_admin_options and not is_active_superuser(request): return Response( {"detail": "Passed options are only modifiable internally"}, @@ -151,8 +212,19 @@ def put(self, request: Request, project) -> Response: SETTINGS_PROJECT_OPTION_KEY, default=performance_issue_settings_default ) + current_settings = {**performance_issue_settings_default, **performance_issue_settings} + data = serializer.validated_data + payload_contains_disabled_threshold_setting = any( + [option in get_disabled_threshold_options(data, current_settings) for option in data] + ) + if payload_contains_disabled_threshold_setting: + return Response( + {"detail": "Disabled options can not be modified"}, + status=status.HTTP_403_FORBIDDEN, + ) + project.update_option( SETTINGS_PROJECT_OPTION_KEY, {**performance_issue_settings_default, **performance_issue_settings, **data}, @@ -175,13 +247,18 @@ def delete(self, request: Request, project) -> Response: return 
self.respond(status=status.HTTP_404_NOT_FOUND) project_settings = project.get_option(SETTINGS_PROJECT_OPTION_KEY, default={}) + threshold_options = [setting.value for setting in ConfigurableThresholds] + internal_only_settings = [setting.value for setting in InternalProjectOptions] + disabled_options = get_disabled_threshold_options(threshold_options, project_settings) if project_settings: - settings_only_with_admin_options = { - option: project_settings[option] - for option in project_settings - if option in map_internal_only_project_settings_to_group.keys() - } - project.update_option(SETTINGS_PROJECT_OPTION_KEY, settings_only_with_admin_options) + unchanged_options = ( + { # internal settings and disabled threshold settings can not be reset + option: project_settings[option] + for option in project_settings + if option in internal_only_settings or option in disabled_options + } + ) + project.update_option(SETTINGS_PROJECT_OPTION_KEY, unchanged_options) return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/src/sentry/audit_log/events.py b/src/sentry/audit_log/events.py index ebf2f74c31df04..68ecf5853088a3 100644 --- a/src/sentry/audit_log/events.py +++ b/src/sentry/audit_log/events.py @@ -129,7 +129,7 @@ def __init__(self): def render(self, audit_log_entry: AuditLogEntry): from sentry.api.endpoints.project_performance_issue_settings import ( - map_internal_only_project_settings_to_group as map, + internal_only_project_settings_to_group_map as map, ) data = audit_log_entry.data diff --git a/tests/sentry/api/endpoints/test_project_performance_issue_settings.py b/tests/sentry/api/endpoints/test_project_performance_issue_settings.py index 49f547d2bfa5e0..898eec3c7686c5 100644 --- a/tests/sentry/api/endpoints/test_project_performance_issue_settings.py +++ b/tests/sentry/api/endpoints/test_project_performance_issue_settings.py @@ -7,6 +7,7 @@ from sentry.testutils import APITestCase from sentry.testutils.helpers import override_options from 
sentry.testutils.silo import region_silo_test +from sentry.utils.performance_issues.performance_detection import get_merged_settings PERFORMANCE_ISSUE_FEATURES = { "organizations:performance-view": True, @@ -30,7 +31,8 @@ def setUp(self) -> None: }, ) - def test_get_project_options_overrides_detection_defaults(self): + @patch("sentry.models.ProjectOption.objects.get_value") + def test_get_project_options_overrides_detection_defaults(self, get_value): with self.feature(PERFORMANCE_ISSUE_FEATURES): response = self.client.get(self.url, format="json") @@ -47,11 +49,7 @@ def test_get_project_options_overrides_detection_defaults(self): assert response.data["consecutive_db_queries_detection_enabled"] assert response.data["large_render_blocking_asset_detection_enabled"] - patch_project_option_get = patch("sentry.models.ProjectOption.objects.get_value") - self.project_option_mock = patch_project_option_get.start() - self.project_option_mock.return_value = {} - - self.project_option_mock.return_value = { + get_value.return_value = { "slow_db_queries_detection_enabled": False, "n_plus_one_db_queries_detection_enabled": False, "uncompressed_assets_detection_enabled": False, @@ -69,8 +67,6 @@ def test_get_project_options_overrides_detection_defaults(self): assert response.status_code == 200, response.content - self.addCleanup(patch_project_option_get.stop) - assert not response.data["n_plus_one_db_queries_detection_enabled"] assert not response.data["slow_db_queries_detection_enabled"] assert not response.data["uncompressed_assets_detection_enabled"] @@ -82,7 +78,8 @@ def test_get_project_options_overrides_detection_defaults(self): assert not response.data["consecutive_db_queries_detection_enabled"] assert not response.data["large_render_blocking_asset_detection_enabled"] - def test_get_project_options_overrides_threshold_defaults(self): + @patch("sentry.models.ProjectOption.objects.get_value") + def test_get_project_options_overrides_threshold_defaults(self, get_value): with 
override_options( { "performance.issues.slow_db_query.duration_threshold": 1000, @@ -112,11 +109,7 @@ def test_get_project_options_overrides_threshold_defaults(self): assert response.data["uncompressed_asset_size_threshold"] == 200000 assert response.data["consecutive_db_min_time_saved_threshold"] == 300 - patch_project_option_get = patch("sentry.models.ProjectOption.objects.get_value") - self.project_option_mock = patch_project_option_get.start() - self.project_option_mock.return_value = {} - - self.project_option_mock.return_value = { + get_value.return_value = { "n_plus_one_db_duration_threshold": 10000, "slow_db_query_duration_threshold": 5000, "render_blocking_fcp_ratio": 0.8, @@ -133,8 +126,6 @@ def test_get_project_options_overrides_threshold_defaults(self): assert response.status_code == 200, response.content - self.addCleanup(patch_project_option_get.stop) - # Updated project settings assert response.data["slow_db_query_duration_threshold"] == 5000 assert response.data["n_plus_one_db_duration_threshold"] == 10000 @@ -201,6 +192,32 @@ def test_put_update_non_super_user_option(self): assert get_response.status_code == 200, response.content assert get_response.data["n_plus_one_db_duration_threshold"] == 3000 + @patch("sentry.models.ProjectOption.objects.get_value") + def test_put_does_not_update_disabled_option(self, get_value): + self.login_as(user=self.user, superuser=False) + get_value.return_value = { + "n_plus_one_db_queries_detection_enabled": False, + } + with self.feature(PERFORMANCE_ISSUE_FEATURES): + response = self.client.put( + self.url, + data={ + "n_plus_one_db_duration_threshold": 3000, + }, + ) + + assert response.status_code == 403, response.content + assert response.data == {"detail": "Disabled options can not be modified"} + + with self.feature(PERFORMANCE_ISSUE_FEATURES): + get_response = self.client.get(self.url, format="json") + + assert get_response.status_code == 200, response.content + assert ( + 
get_response.data["n_plus_one_db_duration_threshold"] + == get_merged_settings(self.project)["n_plus_one_db_duration_threshold"] + ) + def test_update_project_setting_check_validation(self): with self.feature(PERFORMANCE_ISSUE_FEATURES): response = self.client.put( @@ -217,6 +234,18 @@ def test_update_project_setting_check_validation(self): ] } + def test_update_project_setting_invalid_option(self): + with self.feature(PERFORMANCE_ISSUE_FEATURES): + response = self.client.put( + self.url, + data={ + "n_plus_one_db_queries_detection_enabled_invalid": 500, + }, + ) + + assert response.status_code == 400, response.content + assert response.data == {"detail": "Invalid settings option"} + @patch("sentry.api.base.create_audit_entry") def test_changing_admin_settings_creates_audit_log(self, create_audit_entry: MagicMock): @@ -241,20 +270,25 @@ def test_changing_admin_settings_creates_audit_log(self, create_audit_entry: Mag "public": self.project.public, } - def test_delete_reset_project_settings(self): + def test_delete_resets_enabled_project_settings(self): self.project.update_option( SETTINGS_PROJECT_OPTION_KEY, { "n_plus_one_db_queries_detection_enabled": False, - "n_plus_one_db_duration_threshold": 1000, + "slow_db_queries_detection_enabled": True, + "slow_db_query_duration_threshold": 5000, }, ) + assert not self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ "n_plus_one_db_queries_detection_enabled" ] + assert self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ + "slow_db_queries_detection_enabled" + ] assert ( - self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)["n_plus_one_db_duration_threshold"] - == 1000 + self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)["slow_db_query_duration_threshold"] + == 5000 ) with self.feature(PERFORMANCE_ISSUE_FEATURES): @@ -267,6 +301,48 @@ def test_delete_reset_project_settings(self): assert not self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ "n_plus_one_db_queries_detection_enabled" ] # admin option should persist - 
assert "n_plus_one_db_duration_threshold" not in self.project.get_option( + assert self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ + "slow_db_queries_detection_enabled" + ] + assert "slow_db_query_duration_threshold" not in self.project.get_option( SETTINGS_PROJECT_OPTION_KEY + ) # removes enabled threshold settings + + def test_delete_does_not_resets_enabled_project_settings(self): + self.project.update_option( + SETTINGS_PROJECT_OPTION_KEY, + { + "n_plus_one_db_queries_detection_enabled": False, + "slow_db_queries_detection_enabled": False, + "slow_db_query_duration_threshold": 5000, + }, ) + + assert not self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ + "n_plus_one_db_queries_detection_enabled" + ] + assert not self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ + "slow_db_queries_detection_enabled" + ] + assert ( + self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)["slow_db_query_duration_threshold"] + == 5000 + ) + + with self.feature(PERFORMANCE_ISSUE_FEATURES): + response = self.client.delete( + self.url, + data={}, + ) + + assert response.status_code == 204, response.content + assert not self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ + "n_plus_one_db_queries_detection_enabled" + ] # admin option should persist + assert not self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ + "slow_db_queries_detection_enabled" + ] + assert ( + self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)["slow_db_query_duration_threshold"] + == 5000 + ) # setting persists as detection is disabled for corresponding issue From a456dae6dc0b02542218f68cfa3ed08b0725f508 Mon Sep 17 00:00:00 2001 From: sergiosentry <109162568+sergiosentry@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:39:29 -0400 Subject: [PATCH 10/67] Add export dashboard button ## Overview (working with @narsaynorath - requesting code review directly from Nar) Allows export of dashboard to JSON file which can later be imported ### Related PRs - Feature flag: 
https://github.com/getsentry/sentry/pull/52234 - Dashboard import: https://github.com/getsentry/sentry/pull/52307 ![Screenshot 2023-07-06 at 8 07 47 PM](https://github.com/sergiosentry/sentry/assets/109162568/c694a6a5-5a1d-46dd-bad5-d5aaaf63cdce) ![Screenshot 2023-07-06 at 8 08 21 PM](https://github.com/sergiosentry/sentry/assets/109162568/8e7b1cbf-df8e-41ee-a270-bccf9b3eab29) ![Screenshot 2023-07-06 at 8 08 59 PM](https://github.com/sergiosentry/sentry/assets/109162568/420a0a99-2fd8-448c-bfa8-c25ea4bb81f9) --- static/app/views/dashboards/controls.tsx | 17 +- .../app/views/dashboards/exportDashboard.tsx | 164 ++++++++++++++++++ 2 files changed, 180 insertions(+), 1 deletion(-) create mode 100644 static/app/views/dashboards/exportDashboard.tsx diff --git a/static/app/views/dashboards/controls.tsx b/static/app/views/dashboards/controls.tsx index 3f72a213b6613a..552fe08c0e701c 100644 --- a/static/app/views/dashboards/controls.tsx +++ b/static/app/views/dashboards/controls.tsx @@ -8,13 +8,14 @@ import ButtonBar from 'sentry/components/buttonBar'; import Confirm from 'sentry/components/confirm'; import {Hovercard} from 'sentry/components/hovercard'; import {Tooltip} from 'sentry/components/tooltip'; -import {IconAdd, IconEdit} from 'sentry/icons'; +import {IconAdd, IconDownload, IconEdit} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {Organization} from 'sentry/types'; import {trackAnalytics} from 'sentry/utils/analytics'; import {UNSAVED_FILTERS_MESSAGE} from './detail'; +import exportDashboard from './exportDashboard'; import {DashboardListItem, DashboardState, MAX_WIDGETS} from './types'; type Props = { @@ -129,6 +130,20 @@ function Controls({ {hasFeature => ( + + + +
+ +
+ +
); } -const TeamPanelItem = styled(PanelItem)` - padding: ${space(2)}; - align-items: center; - justify-content: space-between; -`; +const GRID_TEMPLATE = ` + display: grid; + grid-template-columns: minmax(100px, 1fr) minmax(0px, 100px) 200px 95px; + gap: ${space(1)}; -const TeamPanelItemLeft = styled('div')` - flex-grow: 4; + > div:last-child { + margin-left: auto; + } `; -const TeamOrgRole = styled('div')` - min-width: 90px; - flex-grow: 1; - display: flex; - justify-content: center; +const TeamPanelHeader = styled(PanelHeader)` + ${GRID_TEMPLATE} `; -const RoleSelectWrapper = styled('div')` - min-width: 200px; - margin-right: ${space(2)}; +const TeamPanelItem = styled(PanelItem)` + ${GRID_TEMPLATE} + padding: ${space(2)}; `; export default TeamSelect; diff --git a/static/app/views/settings/organizationTeams/allTeamsRow.tsx b/static/app/views/settings/organizationTeams/allTeamsRow.tsx index 74311f238c4c5b..c99710fa31b411 100644 --- a/static/app/views/settings/organizationTeams/allTeamsRow.tsx +++ b/static/app/views/settings/organizationTeams/allTeamsRow.tsx @@ -282,24 +282,19 @@ const TeamLink = styled(Link)` export {AllTeamsRow}; export default withApi(AllTeamsRow); -const TeamPanelItem = styled(PanelItem)` +export const GRID_TEMPLATE = ` display: grid; - grid-template-columns: minmax(150px, 4fr) min-content; - grid-template-rows: auto min-content; - gap: ${space(2)}; + grid-template-columns: minmax(150px, 4fr) minmax(0px, 100px) 125px 110px; + gap: ${space(1)}; +`; + +const TeamPanelItem = styled(PanelItem)` + ${GRID_TEMPLATE} align-items: center; > div:last-child { margin-left: auto; } - - @media (min-width: ${p => p.theme.breakpoints.small}) { - grid-template-columns: minmax(150px, 3fr) minmax(90px, 1fr) minmax(90px, 1fr) min-content; - grid-template-rows: auto; - > div:empty { - display: block !important; - } - } `; const DisplayRole = styled('div')<{isHidden: boolean}>` diff --git a/static/app/views/settings/organizationTeams/organizationTeams.tsx 
b/static/app/views/settings/organizationTeams/organizationTeams.tsx index e50fb22d37a1fc..dc9da8307215a6 100644 --- a/static/app/views/settings/organizationTeams/organizationTeams.tsx +++ b/static/app/views/settings/organizationTeams/organizationTeams.tsx @@ -12,6 +12,7 @@ import PanelBody from 'sentry/components/panels/panelBody'; import PanelHeader from 'sentry/components/panels/panelHeader'; import SearchBar from 'sentry/components/searchBar'; import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle'; +import {TeamRoleColumnLabel} from 'sentry/components/teamRoleUtils'; import {DEFAULT_DEBOUNCE_DURATION} from 'sentry/constants'; import {IconAdd} from 'sentry/icons'; import {t} from 'sentry/locale'; @@ -22,6 +23,7 @@ import SettingsPageHeader from 'sentry/views/settings/components/settingsPageHea import {RoleOverwritePanelAlert} from 'sentry/views/settings/organizationTeams/roleOverwriteWarning'; import AllTeamsList from './allTeamsList'; +import {GRID_TEMPLATE} from './allTeamsRow'; import OrganizationAccessRequests from './organizationAccessRequests'; type Props = { @@ -97,7 +99,14 @@ function OrganizationTeams({ query={teamQuery} /> - {t('Your Teams')} + +
{t('Your Teams')}
+
+
+ +
+
+ { /> - +
{t('Members')}
+
+ +
{this.renderDropdown(isTeamAdmin)}
-
+ {this.state.teamMembers.length ? ( this.state.teamMembers.map(member => { return ( @@ -390,4 +396,8 @@ const StyledCreateMemberLink = styled(Link)` text-transform: none; `; +const StyledPanelHeader = styled(PanelHeader)` + ${GRID_TEMPLATE} +`; + export default withConfig(withApi(withOrganization(TeamMembers))); diff --git a/static/app/views/settings/organizationTeams/teamMembersRow.tsx b/static/app/views/settings/organizationTeams/teamMembersRow.tsx index 8b0dba309040c7..33dc225d2cc420 100644 --- a/static/app/views/settings/organizationTeams/teamMembersRow.tsx +++ b/static/app/views/settings/organizationTeams/teamMembersRow.tsx @@ -128,10 +128,14 @@ const RoleSelectWrapper = styled('div')` } `; -const TeamRolesPanelItem = styled(PanelItem)` +export const GRID_TEMPLATE = ` display: grid; - grid-template-columns: minmax(120px, 4fr) minmax(120px, 2fr) minmax(100px, 1fr); - gap: ${space(2)}; + grid-template-columns: minmax(100px, 1fr) 200px 95px; + gap: ${space(1)}; +`; + +const TeamRolesPanelItem = styled(PanelItem)` + ${GRID_TEMPLATE}; align-items: center; > div:last-child { From 43cfac6d155e1e1304220edc4ca551867c24805f Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Mon, 17 Jul 2023 18:25:45 +0200 Subject: [PATCH 18/67] fix: delete not working on memcached (#52979) --- src/sentry/monkey/__init__.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/src/sentry/monkey/__init__.py b/src/sentry/monkey/__init__.py index 3f1ab2cb25b7f8..f554f26aa787d2 100644 --- a/src/sentry/monkey/__init__.py +++ b/src/sentry/monkey/__init__.py @@ -25,5 +25,21 @@ def patch_celery_imgcat(): term.imgcat = lambda *a, **kw: b"" +def patch_memcached(): + # Fixes a bug in Django 3.2 + try: + from django.core.cache.backends.memcached import MemcachedCache + except ImportError: + return + + def fixed_delete(self, key, version=None): + key = self.make_key(key, version=version) + self.validate_key(key) + return bool(self._cache.delete(key)) + + MemcachedCache.delete = 
fixed_delete # type: ignore[method-assign] + + patch_celery_imgcat() patch_pickle_loaders() +patch_memcached() From ec9733576f719c91c228f7feb32c2e985612a994 Mon Sep 17 00:00:00 2001 From: NisanthanNanthakumar Date: Mon, 17 Jul 2023 09:38:35 -0700 Subject: [PATCH 19/67] ref(escalating): Remove threshold experiment (#52942) ## Objective: The threshold should be the original threshold, set when we did the internal release and 100 beta customer release. --- src/sentry/conf/server.py | 2 -- src/sentry/features/__init__.py | 1 - src/sentry/issues/forecasts.py | 18 +++--------------- 3 files changed, 3 insertions(+), 18 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 0e6ce4a82e476f..2ae9ba86933570 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1354,8 +1354,6 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "organizations:discover-query": True, # Enable archive/escalating issue workflow "organizations:escalating-issues": False, - # Enable escalating forecast threshold a/b experiment - "organizations:escalating-issues-experiment-group": False, # Enable archive/escalating issue workflow in MS Teams "organizations:escalating-issues-msteams": False, # Enable archive/escalating issue workflow features in v2 diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index b6e0959c612d62..b853d96ac104d5 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -218,7 +218,6 @@ default_manager.add("organizations:discover-query", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:dynamic-sampling", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:escalating-issues", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:escalating-issues-experiment-group", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) 
default_manager.add("organizations:escalating-issues-msteams", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:escalating-issues-v2", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:issue-states", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) diff --git a/src/sentry/issues/forecasts.py b/src/sentry/issues/forecasts.py index c80a30c76af49e..6410d7e0542e46 100644 --- a/src/sentry/issues/forecasts.py +++ b/src/sentry/issues/forecasts.py @@ -6,18 +6,14 @@ from datetime import datetime from typing import Sequence -from sentry import analytics, features +from sentry import analytics from sentry.issues.escalating import ( ParsedGroupsCount, parse_groups_past_counts, query_groups_past_counts, ) from sentry.issues.escalating_group_forecast import EscalatingGroupForecast -from sentry.issues.escalating_issues_alg import ( - generate_issue_forecast, - looser_version, - standard_version, -) +from sentry.issues.escalating_issues_alg import generate_issue_forecast, standard_version from sentry.models import Group from sentry.tasks.base import instrumented_task @@ -38,15 +34,7 @@ def save_forecast_per_group( for group_id, group_count in group_counts.items(): group = group_dict.get(group_id) if group: - forecast_threshold_version = ( - looser_version - if features.has( - "organizations:escalating-issues-experiment-group", group.project.organization - ) - else standard_version - ) - - forecasts = generate_issue_forecast(group_count, time, forecast_threshold_version) + forecasts = generate_issue_forecast(group_count, time, standard_version) forecasts_list = [forecast["forecasted_value"] for forecast in forecasts] escalating_group_forecast = EscalatingGroupForecast( From d2f6b4035b981a82fc35d5d53242142bc45d39a6 Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Mon, 17 Jul 2023 12:39:17 -0400 Subject: [PATCH 20/67] fix(perf): Add chart back to all transactions 
(#52984) We can't have a double chart row with only 1 allowed chart, adding back related issues for now --- .../app/views/performance/landing/views/allTransactionsView.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/static/app/views/performance/landing/views/allTransactionsView.tsx b/static/app/views/performance/landing/views/allTransactionsView.tsx index 9fb339a1e204f5..3570692b602b90 100644 --- a/static/app/views/performance/landing/views/allTransactionsView.tsx +++ b/static/app/views/performance/landing/views/allTransactionsView.tsx @@ -17,6 +17,7 @@ export function AllTransactionsView(props: BasePerformanceViewProps) { canUseMetricsData(props.organization) ) { doubleChartRowCharts.unshift(PerformanceWidgetSetting.MOST_CHANGED); + doubleChartRowCharts.unshift(PerformanceWidgetSetting.MOST_RELATED_ISSUES); } else { doubleChartRowCharts.unshift(PerformanceWidgetSetting.MOST_REGRESSED); doubleChartRowCharts.push(PerformanceWidgetSetting.MOST_IMPROVED); From aa870e32734b7cc7083ab9c34e6fe51ba90561e5 Mon Sep 17 00:00:00 2001 From: Mike Ihbe Date: Mon, 17 Jul 2023 18:50:10 +0200 Subject: [PATCH 21/67] ref(hybrid-cloud): Restrict notification settings UX to a single organization (#50279) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refactor the notification settings page to restrict viewing notification settings to a single organization. This is required to simplify some user notification setting APIs and make them compatible with hybrid cloud. Currently the user notifications APIs return "virtual" notification settings for every project. This page doesn't really use those settings, it instead renders projects from the list of projects and merges in what gets returned. I think it should work fine if we only return actual notification settings records, which will allow us to simplify the user notification settings endpoints by removing these "virtual" entries -- assuming this was the only use case. 
Issue_Alert_Notifications_—_Sentry --------- Co-authored-by: Valery Brobbey Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> Co-authored-by: Zachary Collins Co-authored-by: Zach Collins Co-authored-by: Mark Story --- .../account/accountNotificationFineTuning.tsx | 61 +++++++--- .../notifications/notificationSettings.tsx | 2 +- .../notificationSettingsByOrganization.tsx | 15 ++- .../notificationSettingsByProjects.spec.tsx | 7 +- .../notificationSettingsByProjects.tsx | 113 +++++++++++++----- .../notificationSettingsByType.spec.tsx | 2 +- .../notificationSettingsByType.tsx | 86 ++++++++++--- .../settings/account/notifications/utils.tsx | 11 ++ 8 files changed, 221 insertions(+), 76 deletions(-) diff --git a/static/app/views/settings/account/accountNotificationFineTuning.tsx b/static/app/views/settings/account/accountNotificationFineTuning.tsx index bb8501de30582f..1dbdbb5bff9d51 100644 --- a/static/app/views/settings/account/accountNotificationFineTuning.tsx +++ b/static/app/views/settings/account/accountNotificationFineTuning.tsx @@ -19,7 +19,9 @@ import { ACCOUNT_NOTIFICATION_FIELDS, FineTuneField, } from 'sentry/views/settings/account/notifications/fields'; -import NotificationSettingsByType from 'sentry/views/settings/account/notifications/notificationSettingsByType'; +import NotificationSettingsByType, { + OrganizationSelectHeader, +} from 'sentry/views/settings/account/notifications/notificationSettingsByType'; import { getNotificationTypeFromPathname, groupByOrganization, @@ -71,7 +73,6 @@ function AccountNotificationsByProject({projects, field}: ANBPProps) { {data.map(({name, projects: projectFields}) => (
- {name} {projectFields.map(f => ( | null; notifications: Record | null; + organizationId: string; projects: Project[] | null; }; class AccountNotificationFineTuning extends DeprecatedAsyncView { + getDefaultState() { + return { + ...super.getDefaultState(), + emails: [], + fineTuneData: null, + notifications: [], + projects: [], + organizationId: this.props.organizations[0].id, + }; + } + getEndpoints(): ReturnType { const {fineTuneType: pathnameType} = this.props.params; + const orgId = this.state?.organizationId || this.props.organizations[0].id; const fineTuneType = getNotificationTypeFromPathname(pathnameType); const endpoints = [ ['notifications', '/users/me/notifications/'], @@ -148,7 +162,7 @@ class AccountNotificationFineTuning extends DeprecatedAsyncView { ]; if (isGroupedByProject(fineTuneType)) { - endpoints.push(['projects', '/projects/']); + endpoints.push(['projects', `/projects/?organization_id=${orgId}`]); } endpoints.push(['emails', '/users/me/emails/']); @@ -178,6 +192,14 @@ class AccountNotificationFineTuning extends DeprecatedAsyncView { ); } + handleOrgChange = (option: {label: string; value: string}) => { + this.setState({organizationId: option.value}); + const self = this; + setTimeout(() => { + self.reloadData(); + }, 0); + }; + renderBody() { const {params} = this.props; const {fineTuneType: pathnameType} = params; @@ -204,7 +226,6 @@ class AccountNotificationFineTuning extends DeprecatedAsyncView { if (!notifications || !fineTuneData) { return null; } - return (
@@ -227,19 +248,25 @@ class AccountNotificationFineTuning extends DeprecatedAsyncView { )} + + {isProject ? ( + + + {this.renderSearchInput({ + placeholder: t('Search Projects'), + url, + stateKey, + })} + + ) : ( + {t('Organizations')} + )} + - - {isProject ? t('Projects') : t('Organizations')} -
- {isProject && - this.renderSearchInput({ - placeholder: t('Search Projects'), - url, - stateKey, - })} -
-
-
{ getEndpoints(): ReturnType { return [ - ['notificationSettings', `/users/me/notification-settings/`], + ['notificationSettings', `/users/me/notification-settings/`, {v2: 'serializer'}], ['legacyData', '/users/me/notifications/'], ]; } diff --git a/static/app/views/settings/account/notifications/notificationSettingsByOrganization.tsx b/static/app/views/settings/account/notifications/notificationSettingsByOrganization.tsx index 9bd73f0b1eeedd..5d62e902a33e5c 100644 --- a/static/app/views/settings/account/notifications/notificationSettingsByOrganization.tsx +++ b/static/app/views/settings/account/notifications/notificationSettingsByOrganization.tsx @@ -1,5 +1,4 @@ import Form from 'sentry/components/forms/form'; -import JsonForm from 'sentry/components/forms/jsonForm'; import {t} from 'sentry/locale'; import {OrganizationSummary} from 'sentry/types'; import withOrganizations from 'sentry/utils/withOrganizations'; @@ -7,6 +6,7 @@ import { NotificationSettingsByProviderObject, NotificationSettingsObject, } from 'sentry/views/settings/account/notifications/constants'; +import {StyledJsonForm} from 'sentry/views/settings/account/notifications/notificationSettingsByProjects'; import { getParentData, getParentField, @@ -38,11 +38,16 @@ function NotificationSettingsByOrganization({ initialData={getParentData(notificationType, notificationSettings, organizations)} onSubmitSuccess={onSubmitSuccess} > - - getParentField(notificationType, notificationSettings, organization, onChange) - )} + fields={organizations.map(organization => { + return getParentField( + notificationType, + notificationSettings, + organization, + onChange + ); + })} /> ); diff --git a/static/app/views/settings/account/notifications/notificationSettingsByProjects.spec.tsx b/static/app/views/settings/account/notifications/notificationSettingsByProjects.spec.tsx index 4ac17e252eb5bf..5d975c0c8e8a01 100644 --- a/static/app/views/settings/account/notifications/notificationSettingsByProjects.spec.tsx +++ 
b/static/app/views/settings/account/notifications/notificationSettingsByProjects.spec.tsx @@ -5,10 +5,10 @@ import {Project} from 'sentry/types'; import NotificationSettingsByProjects from 'sentry/views/settings/account/notifications/notificationSettingsByProjects'; const renderComponent = (projects: Project[]) => { - const {routerContext} = initializeOrg(); + const {routerContext, organization} = initializeOrg(); MockApiClient.addMockResponse({ - url: '/projects/', + url: `/projects/`, method: 'GET', body: projects, }); @@ -28,6 +28,9 @@ const renderComponent = (projects: Project[]) => { notificationSettings={notificationSettings} onChange={jest.fn()} onSubmitSuccess={jest.fn()} + organizationId={organization.id} + organizations={[organization]} + handleOrgChange={jest.fn()} />, {context: routerContext} ); diff --git a/static/app/views/settings/account/notifications/notificationSettingsByProjects.tsx b/static/app/views/settings/account/notifications/notificationSettingsByProjects.tsx index def32bddfa6861..418990727d44b4 100644 --- a/static/app/views/settings/account/notifications/notificationSettingsByProjects.tsx +++ b/static/app/views/settings/account/notifications/notificationSettingsByProjects.tsx @@ -6,8 +6,11 @@ import EmptyMessage from 'sentry/components/emptyMessage'; import Form from 'sentry/components/forms/form'; import JsonForm from 'sentry/components/forms/jsonForm'; import Pagination from 'sentry/components/pagination'; +import Panel from 'sentry/components/panels/panel'; +import PanelBody from 'sentry/components/panels/panelBody'; +import PanelHeader from 'sentry/components/panels/panelHeader'; import {t} from 'sentry/locale'; -import {Project} from 'sentry/types'; +import {Organization, Project} from 'sentry/types'; import {sortProjects} from 'sentry/utils'; import { MIN_PROJECTS_FOR_PAGINATION, @@ -15,6 +18,7 @@ import { NotificationSettingsByProviderObject, NotificationSettingsObject, } from 
'sentry/views/settings/account/notifications/constants'; +import {OrganizationSelectHeader} from 'sentry/views/settings/account/notifications/notificationSettingsByType'; import { getParentData, getParentField, @@ -25,7 +29,7 @@ import { SearchWrapper, } from 'sentry/views/settings/components/defaultSearchBar'; -type Props = { +export type NotificationSettingsByProjectsBaseProps = { notificationSettings: NotificationSettingsObject; notificationType: string; onChange: ( @@ -33,7 +37,14 @@ type Props = { parentId: string ) => NotificationSettingsObject; onSubmitSuccess: () => void; -} & DeprecatedAsyncComponent['props']; +}; + +export type Props = { + handleOrgChange: Function; + organizationId: string; + organizations: Organization[]; +} & NotificationSettingsByProjectsBaseProps & + DeprecatedAsyncComponent['props']; type State = { projects: Project[]; @@ -48,7 +59,15 @@ class NotificationSettingsByProjects extends DeprecatedAsyncComponent { - return [['projects', '/projects/']]; + return [ + [ + 'projects', + `/projects/`, + { + query: {organizationId: this.props.organizationId}, + }, + ], + ]; } /** @@ -74,6 +93,12 @@ class NotificationSettingsByProjects extends DeprecatedAsyncComponent { + // handleOrgChange(option: {label: string; value: string}) { + this.props.handleOrgChange(option); + setTimeout(() => this.reloadData(), 0); + }; + renderBody() { const {notificationType, notificationSettings, onChange, onSubmitSuccess} = this.props; @@ -88,35 +113,50 @@ class NotificationSettingsByProjects extends DeprecatedAsyncComponent - {canSearch && - this.renderSearchInput({ - stateKey: 'projects', - url: '/projects/', - placeholder: t('Search Projects'), - children: renderSearch, - })} -
- {projects.length === 0 ? ( - {t('No projects found')} - ) : ( - Object.entries(this.getGroupedProjects()).map(([groupTitle, parents]) => ( - - getParentField(notificationType, notificationSettings, parent, onChange) - )} - /> - )) - )} - + + + + {canSearch && + this.renderSearchInput({ + stateKey: 'projects', + url: `/projects/?organizationId=${this.props.organizationId}`, + placeholder: t('Search Projects'), + children: renderSearch, + })} + + +
+ {projects.length === 0 ? ( + {t('No projects found')} + ) : ( + Object.entries(this.getGroupedProjects()).map(([groupTitle, parents]) => ( + + getParentField( + notificationType, + notificationSettings, + parent, + onChange + ) + )} + /> + )) + )} + +
{canSearch && shouldPaginate && ( )} @@ -132,3 +172,10 @@ const StyledSearchWrapper = styled(SearchWrapper)` width: 100%; } `; + +export const StyledJsonForm = styled(JsonForm)` + ${Panel} { + border: 0; + margin-bottom: 0; + } +`; diff --git a/static/app/views/settings/account/notifications/notificationSettingsByType.spec.tsx b/static/app/views/settings/account/notifications/notificationSettingsByType.spec.tsx index 5720e97d524bae..0142a84af99e72 100644 --- a/static/app/views/settings/account/notifications/notificationSettingsByType.spec.tsx +++ b/static/app/views/settings/account/notifications/notificationSettingsByType.spec.tsx @@ -30,7 +30,7 @@ function renderMockRequests( }); MockApiClient.addMockResponse({ - url: '/projects/', + url: `/projects/`, method: 'GET', body: [], }); diff --git a/static/app/views/settings/account/notifications/notificationSettingsByType.tsx b/static/app/views/settings/account/notifications/notificationSettingsByType.tsx index 725b0150f6131b..29f3fb1a9206bf 100644 --- a/static/app/views/settings/account/notifications/notificationSettingsByType.tsx +++ b/static/app/views/settings/account/notifications/notificationSettingsByType.tsx @@ -1,9 +1,11 @@ import {Fragment} from 'react'; import DeprecatedAsyncComponent from 'sentry/components/deprecatedAsyncComponent'; +import SelectControl from 'sentry/components/forms/controls/selectControl'; import Form from 'sentry/components/forms/form'; import JsonForm from 'sentry/components/forms/jsonForm'; import {Field} from 'sentry/components/forms/types'; +import Panel from 'sentry/components/panels/panel'; import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle'; import {t} from 'sentry/locale'; import {Organization, OrganizationSummary} from 'sentry/types'; @@ -40,6 +42,45 @@ import { import SettingsPageHeader from 'sentry/views/settings/components/settingsPageHeader'; import TextBlock from 'sentry/views/settings/components/text/textBlock'; +type OrganizationSelectHeaderProps = { + 
handleOrgChange: Function; + organizationId: string; + organizations: Organization[]; +}; + +export function OrganizationSelectHeader({ + handleOrgChange, + organizationId, + organizations, +}: OrganizationSelectHeaderProps) { + const getOrganizationOptions = () => { + return organizations.map(org => { + return { + label: org.name, + value: org.id, + }; + }); + }; + + return ( + + {t('Settings for Organization')} + option.value} + value={organizationId} + styles={{ + container: (provided: {[x: string]: string | number | boolean}) => ({ + ...provided, + minWidth: `300px`, + }), + }} + /> + + ); +} + type Props = { notificationType: string; organizations: Organization[]; @@ -78,6 +119,7 @@ class NotificationSettingsByType extends DeprecatedAsyncComponent notificationSettings: {}, identities: [], organizationIntegrations: [], + organizationId: this.props.organizations[0].id, }; } @@ -87,7 +129,7 @@ class NotificationSettingsByType extends DeprecatedAsyncComponent [ 'notificationSettings', `/users/me/notification-settings/`, - {query: getQueryParams(notificationType)}, + {query: getQueryParams(notificationType), v2: 'serializer'}, ], ['identities', `/users/me/identities/`, {query: {provider: 'slack'}}], [ @@ -318,6 +360,10 @@ class NotificationSettingsByType extends DeprecatedAsyncComponent }); }; + handleOrgChange = (option: {label: string; value: string}) => { + this.setState({organizationId: option.value}); + }; + renderBody() { const {notificationType} = this.props; const {notificationSettings} = this.state; @@ -350,22 +396,28 @@ class NotificationSettingsByType extends DeprecatedAsyncComponent fields={this.getFields()} /> - {!isEverythingDisabled(notificationType, notificationSettings) && - (isGroupedByProject(notificationType) ? ( - this.trackTuningUpdated('project')} - /> - ) : ( - this.trackTuningUpdated('organization')} - /> - ))} + {!isEverythingDisabled(notificationType, notificationSettings) && ( + + {isGroupedByProject(notificationType) ? 
( + this.trackTuningUpdated('project')} + organizations={this.props.organizations} + organizationId={this.state.organizationId} + handleOrgChange={this.handleOrgChange} + /> + ) : ( + this.trackTuningUpdated('organization')} + /> + )} + + )} ); } diff --git a/static/app/views/settings/account/notifications/utils.tsx b/static/app/views/settings/account/notifications/utils.tsx index a5ad221e5b74dd..418dde83656db9 100644 --- a/static/app/views/settings/account/notifications/utils.tsx +++ b/static/app/views/settings/account/notifications/utils.tsx @@ -53,7 +53,18 @@ export const getFallBackValue = (notificationType: string): string => { return 'committed_only'; case 'workflow': return 'subscribe_only'; + case 'approval': + return 'always'; + case 'quota': + return 'always'; + case 'spikeProtection': + return 'always'; + case 'reports': + return 'always'; default: + // These are the expected potential settings with fallback of '' + // issue, quotaErrors, quotaTransactions, quotaAttachments, + // quotaReplays, quotaWarnings, quotaSpendAllocations return ''; } }; From c9439cb509804b2aa2f570acefe7dc5e8c56c30d Mon Sep 17 00:00:00 2001 From: Julia Hoge Date: Mon, 17 Jul 2023 09:51:46 -0700 Subject: [PATCH 22/67] feat(stacktrace): Add stacktrace improvements feature flag (#52983) Closes https://github.com/getsentry/sentry/issues/52591 Add a feature flag to toggle new improvements to the issue details stacktrace ui. 
--- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 2ae9ba86933570..2de1323bf20fd6 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1621,6 +1621,8 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "organizations:anr-rate": False, # Enable tag improvements in the issue details page "organizations:issue-details-tag-improvements": False, + # Enable updates to the stacktrace ui + "organizations:issue-details-stacktrace-improvements": False, # Enable the release details performance section "organizations:release-comparison-performance": False, # Enable team insights page diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index b853d96ac104d5..1e949f4faed426 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -91,6 +91,7 @@ default_manager.add("organizations:issue-details-replay-event", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-details-most-helpful-event", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-details-tag-improvements", OrganizationFeature, FeatureHandlerStrategy.REMOTE) +default_manager.add("organizations:issue-details-stacktrace-improvements", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-platform", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-search-allow-postgres-only-search", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-search-use-cdc-primary", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From 6affbee1bdbe28aebebd4954e857dac4674e6b79 Mon Sep 17 00:00:00 2001 From: Leander Rodrigues Date: Mon, 17 Jul 2023 09:59:03 -0700 Subject: [PATCH 23/67] feat(hybrid-cloud): Add Bitbucket Server parser (#52928) There's 
only one endpoint for Bitbucket Server, and it contains the organization ID in it, so we can reuse the original Bitbucket code, which was a pleasant surprise. --- .../integrations/integration_control.py | 2 + .../integrations/parsers/__init__.py | 2 + .../integrations/parsers/bitbucket.py | 3 + .../integrations/parsers/bitbucket_server.py | 22 ++++++ src/sentry/models/outbox.py | 1 + .../parsers/test_bitbucket_server.py | 68 +++++++++++++++++++ 6 files changed, 98 insertions(+) create mode 100644 src/sentry/middleware/integrations/parsers/bitbucket_server.py create mode 100644 tests/sentry/middleware/integrations/parsers/test_bitbucket_server.py diff --git a/src/sentry/middleware/integrations/integration_control.py b/src/sentry/middleware/integrations/integration_control.py index 7c6b4c2ec1c6c4..1f722e78da4899 100644 --- a/src/sentry/middleware/integrations/integration_control.py +++ b/src/sentry/middleware/integrations/integration_control.py @@ -8,6 +8,7 @@ from .parsers import ( BitbucketRequestParser, + BitbucketServerRequestParser, GithubEnterpriseRequestParser, GithubRequestParser, GitlabRequestParser, @@ -23,6 +24,7 @@ ACTIVE_PARSERS = [ BitbucketRequestParser, + BitbucketServerRequestParser, GithubEnterpriseRequestParser, GithubRequestParser, GitlabRequestParser, diff --git a/src/sentry/middleware/integrations/parsers/__init__.py b/src/sentry/middleware/integrations/parsers/__init__.py index 744666cda5eaf4..755f746ea29378 100644 --- a/src/sentry/middleware/integrations/parsers/__init__.py +++ b/src/sentry/middleware/integrations/parsers/__init__.py @@ -1,4 +1,5 @@ from .bitbucket import BitbucketRequestParser +from .bitbucket_server import BitbucketServerRequestParser from .github import GithubRequestParser from .github_enterprise import GithubEnterpriseRequestParser from .gitlab import GitlabRequestParser @@ -10,6 +11,7 @@ __all__ = ( "BitbucketRequestParser", + "BitbucketServerRequestParser", "GithubRequestParser", "GitlabRequestParser", 
"JiraRequestParser", diff --git a/src/sentry/middleware/integrations/parsers/bitbucket.py b/src/sentry/middleware/integrations/parsers/bitbucket.py index bea8faf0698031..53a17bf8c9fe02 100644 --- a/src/sentry/middleware/integrations/parsers/bitbucket.py +++ b/src/sentry/middleware/integrations/parsers/bitbucket.py @@ -16,6 +16,9 @@ class BitbucketRequestParser(BaseRequestParser): webhook_identifier = WebhookProviderIdentifier.BITBUCKET def get_bitbucket_webhook_response(self): + """ + Used for identifying regions from Bitbucket and Bitbucket Server webhooks + """ # The organization is provided in the path, so we can skip inferring organizations # from the integration credentials organization_id = self.match.kwargs.get("organization_id") diff --git a/src/sentry/middleware/integrations/parsers/bitbucket_server.py b/src/sentry/middleware/integrations/parsers/bitbucket_server.py new file mode 100644 index 00000000000000..3b17e07e30ffc3 --- /dev/null +++ b/src/sentry/middleware/integrations/parsers/bitbucket_server.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +import logging + +from django.http import HttpResponse + +from sentry.integrations.bitbucket_server.webhook import BitbucketServerWebhookEndpoint +from sentry.middleware.integrations.parsers.bitbucket import BitbucketRequestParser +from sentry.models.outbox import WebhookProviderIdentifier + +logger = logging.getLogger(__name__) + + +class BitbucketServerRequestParser(BitbucketRequestParser): + provider = "bitbucket_server" + webhook_identifier = WebhookProviderIdentifier.BITBUCKET_SERVER + + def get_response(self) -> HttpResponse: + view_class = self.match.func.view_class # type: ignore + if view_class == BitbucketServerWebhookEndpoint: + return self.get_bitbucket_webhook_response() + return self.get_response_from_control_silo() diff --git a/src/sentry/models/outbox.py b/src/sentry/models/outbox.py index 3d7a35a79f544a..4f51d97e060d7b 100644 --- a/src/sentry/models/outbox.py +++ 
b/src/sentry/models/outbox.py @@ -107,6 +107,7 @@ class WebhookProviderIdentifier(IntEnum): VSTS = 6 JIRA_SERVER = 7 GITHUB_ENTERPRISE = 8 + BITBUCKET_SERVER = 9 def _ensure_not_null(k: str, v: Any) -> Any: diff --git a/tests/sentry/middleware/integrations/parsers/test_bitbucket_server.py b/tests/sentry/middleware/integrations/parsers/test_bitbucket_server.py new file mode 100644 index 00000000000000..d8ec94b5242171 --- /dev/null +++ b/tests/sentry/middleware/integrations/parsers/test_bitbucket_server.py @@ -0,0 +1,68 @@ +from unittest import mock +from unittest.mock import MagicMock + +from django.http import HttpResponse +from django.test import RequestFactory, override_settings +from django.urls import reverse + +from sentry.middleware.integrations.integration_control import IntegrationControlMiddleware +from sentry.middleware.integrations.parsers.bitbucket_server import BitbucketServerRequestParser +from sentry.models.outbox import WebhookProviderIdentifier +from sentry.services.hybrid_cloud.organization_mapping.service import organization_mapping_service +from sentry.silo.base import SiloMode +from sentry.testutils import TestCase +from sentry.testutils.outbox import assert_webhook_outboxes +from sentry.testutils.region import override_regions +from sentry.testutils.silo import control_silo_test +from sentry.types.region import Region, RegionCategory + + +@control_silo_test() +class BitbucketServerRequestParserTest(TestCase): + get_response = MagicMock(return_value=HttpResponse(content=b"no-error", status=200)) + middleware = IntegrationControlMiddleware(get_response) + factory = RequestFactory() + region = Region("na", 1, "https://na.testserver", RegionCategory.MULTI_TENANT) + region_config = (region,) + + def setUp(self): + super().setUp() + self.path = reverse( + "sentry-extensions-bitbucket-webhook", kwargs={"organization_id": self.organization.id} + ) + self.integration = self.create_integration( + organization=self.organization, + 
external_id="bitbucketserver:1", + provider="bitbucket_server", + ) + + @override_settings(SILO_MODE=SiloMode.CONTROL) + def test_routing_webhook(self): + region_route = reverse( + "sentry-extensions-bitbucketserver-webhook", + kwargs={"organization_id": self.organization.id, "integration_id": self.integration.id}, + ) + request = self.factory.post(region_route) + parser = BitbucketServerRequestParser(request=request, response_handler=self.get_response) + + # Missing region + organization_mapping_service.update( + organization_id=self.organization.id, update={"region_name": "eu"} + ) + with mock.patch.object( + parser, "get_response_from_control_silo" + ) as get_response_from_control_silo: + parser.get_response() + assert get_response_from_control_silo.called + + # Valid region + organization_mapping_service.update( + organization_id=self.organization.id, update={"region_name": "na"} + ) + with override_regions(self.region_config): + parser.get_response() + assert_webhook_outboxes( + factory_request=request, + webhook_identifier=WebhookProviderIdentifier.BITBUCKET_SERVER, + region_names=[self.region.name], + ) From b3aa577ef198cf63d90064e8b1d5fbd9df4e3fd6 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:10:16 -0700 Subject: [PATCH 24/67] feat(api): Document add/remove team to/from project (#52321) New Add: Screenshot 2023-07-10 at 2 49 33 PM New Delete: ![Screenshot 2023-07-05 at 3 57 44 PM](https://github.com/getsentry/sentry/assets/67301797/a3cfb0fc-12df-4ea2-9f81-501195ace38e) --- .../api/endpoints/project_team_details.py | 60 ++++++++++---- .../apidocs/examples/project_examples.py | 83 +++++++++++++++++++ .../endpoints/test_project_team_details.py | 8 +- 3 files changed, 132 insertions(+), 19 deletions(-) diff --git a/src/sentry/api/endpoints/project_team_details.py b/src/sentry/api/endpoints/project_team_details.py index 65c37b027d56a5..f92eeb0c0017cd 100644 --- 
a/src/sentry/api/endpoints/project_team_details.py +++ b/src/sentry/api/endpoints/project_team_details.py @@ -1,60 +1,86 @@ -from django.http import Http404 +from drf_spectacular.utils import extend_schema from rest_framework.request import Request from rest_framework.response import Response from sentry.api.base import region_silo_endpoint from sentry.api.bases.project import ProjectEndpoint, ProjectPermission +from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.serializers import serialize from sentry.api.serializers.models.project import ProjectWithTeamSerializer +from sentry.apidocs.constants import RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND +from sentry.apidocs.examples.project_examples import ProjectExamples +from sentry.apidocs.parameters import GlobalParams from sentry.models import Team class ProjectTeamsPermission(ProjectPermission): scope_map = { - "GET": ["project:read", "project:write", "project:admin"], "POST": ["project:write", "project:admin"], - "PUT": ["project:write", "project:admin"], # allow deletes with write permission because it's just removing # a team from a project and not anything more destructive "DELETE": ["project:write", "project:admin"], } +@extend_schema(tags=["Projects"]) @region_silo_endpoint class ProjectTeamDetailsEndpoint(ProjectEndpoint): + public = {"POST", "DELETE"} permission_classes = (ProjectTeamsPermission,) + @extend_schema( + operation_id="Add a Team to a Project", + parameters=[ + GlobalParams.ORG_SLUG, + GlobalParams.PROJECT_SLUG, + GlobalParams.TEAM_SLUG, + ], + request=None, + responses={ + 201: ProjectWithTeamSerializer, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + examples=ProjectExamples.ADD_TEAM_TO_PROJECT, + ) def post(self, request: Request, project, team_slug) -> Response: """ - Give a team access to a project - ``````````````````````````````` - :pparam string organization_slug: the slug of the organization. - :pparam string project_slug: the slug of the project. 
- :pparam string team_slug: the slug of the team. - :auth: required + Give a team access to a project. """ try: team = Team.objects.get(organization_id=project.organization_id, slug=team_slug) except Team.DoesNotExist: - raise Http404 + raise ResourceDoesNotExist(detail="Team does not exist.") # A user with project:write can grant access to this project to other user/teams project.add_team(team) return Response(serialize(project, request.user, ProjectWithTeamSerializer()), status=201) + @extend_schema( + operation_id="Delete a Team from a Project", + parameters=[ + GlobalParams.ORG_SLUG, + GlobalParams.PROJECT_SLUG, + GlobalParams.TEAM_SLUG, + ], + request=None, + responses={ + 200: ProjectWithTeamSerializer, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + examples=ProjectExamples.DELETE_TEAM_FROM_PROJECT, + ) def delete(self, request: Request, project, team_slug) -> Response: """ - Revoke a team's access to a project - ``````````````````````````````````` - :pparam string organization_slug: the slug of the organization. - :pparam string project_slug: the slug of the project. - :pparam string team_slug: the slug of the team. - :auth: required + Revoke a team's access to a project. + + Note that Team Admins can only revoke access to teams they are admins of. 
""" try: team = Team.objects.get(organization_id=project.organization_id, slug=team_slug) except Team.DoesNotExist: - raise Http404 + raise ResourceDoesNotExist(detail="Team does not exist.") if not request.access.has_team_scope(team, "project:write"): return Response( diff --git a/src/sentry/apidocs/examples/project_examples.py b/src/sentry/apidocs/examples/project_examples.py index ae589002f8f558..911957aa1df01f 100644 --- a/src/sentry/apidocs/examples/project_examples.py +++ b/src/sentry/apidocs/examples/project_examples.py @@ -427,6 +427,71 @@ } +def project_with_team(extra_team: bool = False): + teams = [ + { + "id": "2349234102", + "name": "Prime Mover", + "slug": "prime-mover", + }, + { + "id": "47584447", + "name": "Powerful Abolitionist", + "slug": "powerful-abolitionist", + }, + ] + return { + "id": "6758470122493650", + "slug": "The Spoiled Yoghurt", + "name": "the-spoiled-yoghurt", + "platform": "javascript", + "dateCreated": "2023-03-29T15:25:21.344565Z", + "isBookmarked": False, + "isMember": True, + "features": [ + "alert-filters", + "custom-inbound-filters", + "data-forwarding", + "discard-groups", + "minidump", + "race-free-group-creation", + "rate-limits", + "servicehooks", + "similarity-indexing", + "similarity-view", + ], + "firstEvent": None, + "firstTransactionEvent": True, + "access": [ + "project:read", + "event:read", + "team:read", + "alerts:read", + "org:read", + "event:write", + "project:releases", + "member:read", + ], + "hasAccess": True, + "hasMinifiedStackTrace": False, + "hasMonitors": False, + "hasProfiles": False, + "hasReplays": False, + "hasSessions": False, + "isInternal": False, + "isPublic": False, + "avatar": {"avatarType": "letter_avatar", "avatarUuid": None}, + "color": "#5cbf3f", + "status": "active", + "team": { + "id": "2349234102", + "name": "Prime Mover", + "slug": "prime-mover", + }, + "teams": teams if extra_team else teams[:1], + } + + class ProjectExamples: BASE_KEY = [ OpenApiExample( @@ -475,3 +540,21 @@ class 
ProjectExamples: response_only=True, ), ] + + ADD_TEAM_TO_PROJECT = [ + OpenApiExample( + "Give a Team Access to a Project", + value=project_with_team(extra_team=True), + status_codes=["201"], + response_only=True, + ), + ] + + DELETE_TEAM_FROM_PROJECT = [ + OpenApiExample( + "Revoke a Team's Access to a Project", + value=project_with_team(), + status_codes=["200"], + response_only=True, + ), + ] diff --git a/tests/sentry/api/endpoints/test_project_team_details.py b/tests/sentry/api/endpoints/test_project_team_details.py index ef9b69ee55231e..f0b6e6604abd4f 100644 --- a/tests/sentry/api/endpoints/test_project_team_details.py +++ b/tests/sentry/api/endpoints/test_project_team_details.py @@ -34,13 +34,15 @@ def test_add_team(self): def test_add_team_not_found(self): project = self.create_project() - self.get_error_response( + response = self.get_error_response( project.organization.slug, project.slug, "not-a-team", status_code=status.HTTP_404_NOT_FOUND, ) + assert response.data["detail"] == "Team does not exist." + @with_feature("organizations:team-roles") def test_add_team_with_team_role(self): user = self.create_user(username="foo") @@ -131,13 +133,15 @@ def test_remove_team(self): def test_remove_team_not_found(self): project = self.create_project() - self.get_error_response( + response = self.get_error_response( project.organization.slug, project.slug, "not-a-team", status_code=status.HTTP_404_NOT_FOUND, ) + assert response.data["detail"] == "Team does not exist." + @with_feature("organizations:team-roles") def test_remove_team_with_team_role(self): user = self.create_user(username="foo") From 1f5b7634e1c1e2ae0501a67c5987897988ea5467 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:12:24 -0700 Subject: [PATCH 25/67] ref(replays): update links to remove project slug (#52933) Closes #51541 by updating all links to replays to remove any instances of organization slug, project slug, etc. 
Checked the following: - [x] issue details - [x] issue replays tab - [x] transaction summary - [x] transaction replays tab - [x] discover event detail - [x] discover home - [x] replay list page --- .../components/events/eventReplay/replayPreview.spec.tsx | 2 +- .../app/components/events/eventReplay/replayPreview.tsx | 2 +- .../issueDetails/groupReplays/groupReplays.spec.tsx | 9 +++------ .../transactionSummary/transactionReplays/index.spec.tsx | 4 ++-- .../app/views/performance/transactionSummary/utils.tsx | 6 +++--- static/app/views/replays/replayTable/tableCell.tsx | 2 +- 6 files changed, 11 insertions(+), 14 deletions(-) diff --git a/static/app/components/events/eventReplay/replayPreview.spec.tsx b/static/app/components/events/eventReplay/replayPreview.spec.tsx index d37ff175508266..ff18db4dd55e04 100644 --- a/static/app/components/events/eventReplay/replayPreview.spec.tsx +++ b/static/app/components/events/eventReplay/replayPreview.spec.tsx @@ -25,7 +25,7 @@ const mockEvent = { }; const mockButtonHref = - '/organizations/sentry-emerging-tech/replays/761104e184c64d439ee1014b72b4d83b/?referrer=%2Forganizations%2F%3AorgId%2Fissues%2F%3AgroupId%2Freplays%2F&t=62&t_main=console'; + '/replays/761104e184c64d439ee1014b72b4d83b/?referrer=%2Forganizations%2F%3AorgId%2Fissues%2F%3AgroupId%2Freplays%2F&t=62&t_main=console'; // Mock screenfull library jest.mock('screenfull', () => ({ diff --git a/static/app/components/events/eventReplay/replayPreview.tsx b/static/app/components/events/eventReplay/replayPreview.tsx index a107dae6b8f415..64c7ad6b59cef1 100644 --- a/static/app/components/events/eventReplay/replayPreview.tsx +++ b/static/app/components/events/eventReplay/replayPreview.tsx @@ -99,7 +99,7 @@ function ReplayPreview({orgSlug, replaySlug, event, onClickOpenReplay}: Props) { } const fullReplayUrl = { - pathname: `/organizations/${orgSlug}/replays/${replayId}/`, + pathname: `/replays/${replayId}/`, query: { referrer: getRouteStringFromRoutes(routes), t_main: 
'console', diff --git a/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx b/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx index f1b34fe7d91a04..6d463febd76e76 100644 --- a/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx +++ b/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx @@ -293,10 +293,7 @@ describe('GroupReplays', () => { finished_at: new Date('2022-09-15T06:54:00+00:00'), id: '346789a703f6454384f1de473b8b9fcc', started_at: new Date('2022-09-15T06:50:00+00:00'), - urls: [ - 'https://dev.getsentry.net:7999/organizations/sentry-emerging-tech/replays/', - '/organizations/sentry-emerging-tech/replays/?project=2', - ], + urls: ['https://dev.getsentry.net:7999/replays/', '/replays/?project=2'], }, { ...TestStubs.ReplayList()[0], @@ -342,13 +339,13 @@ describe('GroupReplays', () => { // Expect the first row to have the correct href expect(screen.getAllByRole('link', {name: 'testDisplayName'})[0]).toHaveAttribute( 'href', - `/organizations/org-slug/replays/project-slug:${REPLAY_ID_1}/?${expectedQuery}` + `/replays/${REPLAY_ID_1}/?${expectedQuery}` ); // Expect the second row to have the correct href expect(screen.getAllByRole('link', {name: 'testDisplayName'})[1]).toHaveAttribute( 'href', - `/organizations/org-slug/replays/project-slug:${REPLAY_ID_2}/?${expectedQuery}` + `/replays/${REPLAY_ID_2}/?${expectedQuery}` ); // Expect the first row to have the correct duration diff --git a/static/app/views/performance/transactionSummary/transactionReplays/index.spec.tsx b/static/app/views/performance/transactionSummary/transactionReplays/index.spec.tsx index 76ca76be508ed4..852b28d6ebfee8 100644 --- a/static/app/views/performance/transactionSummary/transactionReplays/index.spec.tsx +++ b/static/app/views/performance/transactionSummary/transactionReplays/index.spec.tsx @@ -247,13 +247,13 @@ describe('TransactionReplays', () => { // Expect the first row to have the correct href expect(screen.getAllByRole('link', 
{name: 'testDisplayName'})[0]).toHaveAttribute( 'href', - `/organizations/org-slug/replays/project-slug:346789a703f6454384f1de473b8b9fcc/?${expectedQuery}` + `/replays/346789a703f6454384f1de473b8b9fcc/?${expectedQuery}` ); // Expect the second row to have the correct href expect(screen.getAllByRole('link', {name: 'testDisplayName'})[1]).toHaveAttribute( 'href', - `/organizations/org-slug/replays/project-slug:b05dae9b6be54d21a4d5ad9f8f02b780/?${expectedQuery}` + `/replays/b05dae9b6be54d21a4d5ad9f8f02b780/?${expectedQuery}` ); // Expect the first row to have the correct duration diff --git a/static/app/views/performance/transactionSummary/utils.tsx b/static/app/views/performance/transactionSummary/utils.tsx index 309d75fcea4854..b3879118d741f7 100644 --- a/static/app/views/performance/transactionSummary/utils.tsx +++ b/static/app/views/performance/transactionSummary/utils.tsx @@ -175,7 +175,7 @@ export function generateReplayLink(routes: PlainRoute[]) { const referrer = getRouteStringFromRoutes(routes); return ( - organization: Organization, + _: Organization, tableRow: TableDataRow, _query: Query | undefined ): LocationDescriptor => { @@ -186,7 +186,7 @@ export function generateReplayLink(routes: PlainRoute[]) { if (!tableRow.timestamp) { return { - pathname: `/organizations/${organization.slug}/replays/${replayId}/`, + pathname: `/replays/${replayId}/`, query: { referrer, }, @@ -199,7 +199,7 @@ export function generateReplayLink(routes: PlainRoute[]) { : undefined; return { - pathname: `/organizations/${organization.slug}/replays/${replayId}/`, + pathname: `/replays/${replayId}/`, query: { event_t: transactionStartTimestamp, referrer, diff --git a/static/app/views/replays/replayTable/tableCell.tsx b/static/app/views/replays/replayTable/tableCell.tsx index b75f878e8383d1..f6222459205a0d 100644 --- a/static/app/views/replays/replayTable/tableCell.tsx +++ b/static/app/views/replays/replayTable/tableCell.tsx @@ -56,7 +56,7 @@ export function ReplayCell({ const project 
= projects.find(p => p.id === replay.project_id); const replayDetails = { - pathname: `/organizations/${organization.slug}/replays/${project?.slug}:${replay.id}/`, + pathname: `/replays/${replay.id}/`, query: { referrer, ...eventView.generateQueryStringObject(), From 51feb345499065bc340d1f6ad171fb47a1b91bba Mon Sep 17 00:00:00 2001 From: Ian Woodard <17186604+IanWoodard@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:28:02 -0700 Subject: [PATCH 26/67] ref(ci): adding another backend test shard (#52987) Adding another backend test shard to hopefully decrease the amount backend test runs that timeout. --- .github/workflows/backend.yml | 4 ++-- codecov.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index c9a85066a72c6e..be542d8647d6d1 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -74,13 +74,13 @@ jobs: fail-fast: false matrix: # XXX: When updating this, make sure you also update MATRIX_INSTANCE_TOTAL. - instance: [0, 1, 2, 3, 4, 5] + instance: [0, 1, 2, 3, 4, 5, 6] pg-version: ['14'] env: # XXX: `MATRIX_INSTANCE_TOTAL` must be hardcoded to the length of `strategy.matrix.instance`. # If this increases, make sure to also increase `flags.backend.after_n_builds` in `codecov.yml`. 
- MATRIX_INSTANCE_TOTAL: 6 + MATRIX_INSTANCE_TOTAL: 7 steps: - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0 diff --git a/codecov.yml b/codecov.yml index c737102194214f..c4fbf4d5bcd722 100644 --- a/codecov.yml +++ b/codecov.yml @@ -58,7 +58,7 @@ flags: - "src/sentry/**/*.py" carryforward: true # Do not send any status checks until N coverage reports are uploaded - after_n_builds: 17 + after_n_builds: 18 # Read more here: https://docs.codecov.com/docs/pull-request-comments comment: From 3b659440a3d65dfea9d3fd4f33b7b0290093fba1 Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Mon, 17 Jul 2023 10:29:57 -0700 Subject: [PATCH 27/67] ref(replay): Refactor the replay.details-data-loaded analytics event (#52938) --- .../replays/hooks/useLogReplayDataLoaded.tsx | 16 ++++++++++++---- static/app/utils/replays/replayReader.tsx | 5 ----- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/static/app/utils/replays/hooks/useLogReplayDataLoaded.tsx b/static/app/utils/replays/hooks/useLogReplayDataLoaded.tsx index 35b0ca664a23ad..a50836ea529934 100644 --- a/static/app/utils/replays/hooks/useLogReplayDataLoaded.tsx +++ b/static/app/utils/replays/hooks/useLogReplayDataLoaded.tsx @@ -22,9 +22,17 @@ function useLogReplayDataLoaded({fetchError, fetching, projectSlug, replay}: Pro if (fetching || fetchError || !replay || !project) { return; } - const feErrorIds = replay.getReplay().error_ids || []; - const allErrors = replay.getRawErrors(); - const beErrorCount = allErrors.filter(error => !feErrorIds.includes(error.id)).length; + const replayRecord = replay.getReplay(); + const allErrors = replay.getErrorFrames(); + + // BUG(replay): This will often report the discrepancy between errors + // accociated with the replay, and errors the replay knows about. + // ie: When an error is filtered server-side, it would cound as a replay with 1 + // backend error. 
+ const feErrorIds = replayRecord.error_ids || []; + const beErrorCount = allErrors.filter( + error => !feErrorIds.includes(error.data.eventId) + ).length; trackAnalytics('replay.details-data-loaded', { organization, @@ -35,7 +43,7 @@ function useLogReplayDataLoaded({fetchError, fetching, projectSlug, replay}: Pro total_errors: allErrors.length, started_at_delta: replay.timestampDeltas.startedAtDelta, finished_at_delta: replay.timestampDeltas.finishedAtDelta, - replay_id: replay.getReplay().id, + replay_id: replayRecord.id, }); }, [organization, project, fetchError, fetching, projectSlug, replay]); } diff --git a/static/app/utils/replays/replayReader.tsx b/static/app/utils/replays/replayReader.tsx index db03ca1927174e..21979d68771162 100644 --- a/static/app/utils/replays/replayReader.tsx +++ b/static/app/utils/replays/replayReader.tsx @@ -172,8 +172,6 @@ export default class ReplayReader { replayRecord.finished_at.getTime() - replayRecord.started_at.getTime() ); - this.rawErrors = errors; - this.sortedSpans = spansFactory(spans); this.breadcrumbs = breadcrumbFactory( replayRecord, @@ -194,7 +192,6 @@ export default class ReplayReader { private _sortedRRWebEvents: RecordingFrame[]; private _sortedSpanFrames: SpanFrame[]; - private rawErrors: ReplayError[]; private sortedSpans: ReplaySpan[]; private replayRecord: ReplayRecord; private breadcrumbs: Crumb[]; @@ -330,8 +327,6 @@ export default class ReplayReader { this.breadcrumbs.filter(crumb => crumb.category === 'console') ); - getRawErrors = memoize(() => this.rawErrors); - getIssueCrumbs = memoize(() => this.breadcrumbs.filter(crumb => crumb.category === 'issue') ); From 7b24e6a0652f5cb61faf82cf8e7bfca91effd1cc Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Mon, 17 Jul 2023 11:09:45 -0700 Subject: [PATCH 28/67] fix(hybrid-cloud): Fixes tests for split DB mode app installs using transaction routing and silo annotations (#52932) Co-authored-by: Zachary Collins --- src/sentry/db/postgres/transactions.py | 6 
+++--- src/sentry/models/organization.py | 4 ++-- src/sentry/models/organizationmember.py | 4 ++-- .../hybrid_cloud/organizationmember_mapping/impl.py | 10 +++++++--- src/sentry/utils/snowflake.py | 6 ++++-- 5 files changed, 18 insertions(+), 12 deletions(-) diff --git a/src/sentry/db/postgres/transactions.py b/src/sentry/db/postgres/transactions.py index e7052f91aed9ea..c13e50ac498f55 100644 --- a/src/sentry/db/postgres/transactions.py +++ b/src/sentry/db/postgres/transactions.py @@ -5,7 +5,7 @@ import threading from django.conf import settings -from django.db import transaction +from django.db import connections, transaction @contextlib.contextmanager @@ -87,7 +87,7 @@ def in_test_assert_no_transaction(msg: str): from sentry.testutils import hybrid_cloud - for using in settings.DATABASES: # type: ignore + for conn in connections.all(): assert not hybrid_cloud.simulated_transaction_watermarks.connection_above_watermark( - using + connection=conn ), msg diff --git a/src/sentry/models/organization.py b/src/sentry/models/organization.py index 7d4158786f2e3b..5e96171494dffd 100644 --- a/src/sentry/models/organization.py +++ b/src/sentry/models/organization.py @@ -5,7 +5,7 @@ from typing import Collection, FrozenSet, Optional, Sequence from django.conf import settings -from django.db import models, transaction +from django.db import models, router, transaction from django.db.models import QuerySet from django.urls import NoReverseMatch, reverse from django.utils import timezone @@ -255,7 +255,7 @@ def save(self, *args, **kwargs): lambda: self.save_with_update_outbox(*args, **kwargs), ) else: - with outbox_context(transaction.atomic()): + with outbox_context(transaction.atomic(using=router.db_for_write(Organization))): self.save_with_update_outbox(*args, **kwargs) # Override for the default update method to ensure that most atomic updates diff --git a/src/sentry/models/organizationmember.py b/src/sentry/models/organizationmember.py index 8a95424d9b15bb..c67bcd6d5c0df5 
100644 --- a/src/sentry/models/organizationmember.py +++ b/src/sentry/models/organizationmember.py @@ -9,7 +9,7 @@ from uuid import uuid4 from django.conf import settings -from django.db import models, transaction +from django.db import models, router, transaction from django.db.models import Q, QuerySet from django.urls import reverse from django.utils import timezone @@ -260,7 +260,7 @@ def save(self, *args, **kwargs): self.user_id and self.email is None ), "Must set either user or email" - with outbox_context(transaction.atomic()): + with outbox_context(transaction.atomic(using=router.db_for_write(OrganizationMember))): if self.token and not self.token_expires_at: self.refresh_expires_at() super().save(*args, **kwargs) diff --git a/src/sentry/services/hybrid_cloud/organizationmember_mapping/impl.py b/src/sentry/services/hybrid_cloud/organizationmember_mapping/impl.py index 9e542cb1ecbe59..5e2cf14ef02cd8 100644 --- a/src/sentry/services/hybrid_cloud/organizationmember_mapping/impl.py +++ b/src/sentry/services/hybrid_cloud/organizationmember_mapping/impl.py @@ -5,7 +5,7 @@ from typing import Optional -from django.db import IntegrityError, transaction +from django.db import IntegrityError, router, transaction from sentry.models import outbox_context from sentry.models.organizationmembermapping import OrganizationMemberMapping @@ -48,7 +48,9 @@ def apply_update(existing: OrganizationMemberMapping) -> None: outbox.save() try: - with outbox_context(transaction.atomic()): + with outbox_context( + transaction.atomic(using=router.db_for_write(OrganizationMemberMapping)) + ): existing = self._find_organization_member( organization_id=organization_id, organizationmember_id=organizationmember_id, @@ -76,7 +78,9 @@ def apply_update(existing: OrganizationMemberMapping) -> None: if existing is None: raise e - with outbox_context(transaction.atomic()): + with outbox_context( + transaction.atomic(using=router.db_for_write(OrganizationMemberMapping)) + ): apply_update(existing) 
return serialize_org_member_mapping(existing) diff --git a/src/sentry/utils/snowflake.py b/src/sentry/utils/snowflake.py index 854f4438bbe288..c95b5cbec89256 100644 --- a/src/sentry/utils/snowflake.py +++ b/src/sentry/utils/snowflake.py @@ -3,7 +3,7 @@ from typing import Tuple from django.conf import settings -from django.db import IntegrityError, transaction +from django.db import IntegrityError, router, transaction from rest_framework import status from rest_framework.exceptions import APIException @@ -24,7 +24,9 @@ def save_with_snowflake_id(self, snowflake_redis_key, save_callback): if not self.id: self.id = generate_snowflake_id(snowflake_redis_key) try: - with outbox_context(transaction.atomic()): + # We need to route to the correct database in a split DB mode + # so we use the class being mixed in to do this. + with outbox_context(transaction.atomic(using=router.db_for_write(type(self)))): save_callback() return except IntegrityError: From 4eceac562a6a86a27c45da80cf333f6f03843a55 Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Mon, 17 Jul 2023 11:18:39 -0700 Subject: [PATCH 29/67] fix(issue-details): Fix react warning (#52907) this pr fixes the react warning that came from https://github.com/getsentry/sentry/issues/52590 --- static/app/views/issueDetails/quickTrace/traceLink.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/static/app/views/issueDetails/quickTrace/traceLink.tsx b/static/app/views/issueDetails/quickTrace/traceLink.tsx index 2d043ea9a95a3c..c7016edd5ab332 100644 --- a/static/app/views/issueDetails/quickTrace/traceLink.tsx +++ b/static/app/views/issueDetails/quickTrace/traceLink.tsx @@ -44,7 +44,9 @@ export function TraceLink({event, noTrace}: TraceLinkProps) { ); } -const StyledLink = styled(Link)<{noTrace: boolean}>` +const StyledLink = styled(Link, {shouldForwardProp: prop => prop !== 'noTrace'})<{ + noTrace: boolean; +}>` margin-left: ${p => (p.noTrace ? 
0 : space(1))}; font-size: ${p => p.theme.fontSizeSmall}; `; From 145df55c8f374a9c22869dc439cf0a0515e8a830 Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Mon, 17 Jul 2023 14:32:23 -0400 Subject: [PATCH 30/67] feat(perf): Hide timing metrics on http (#52993) ### Summary Timing metrics for http are coming out but there are a lot and there is already a lot of keys on the span details, this will hide the timing metrics for now. We can opt-in specific ones on a case-by-case later. --- .../events/interfaces/spans/spanDetail.tsx | 31 +++++++++++++++---- 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/static/app/components/events/interfaces/spans/spanDetail.tsx b/static/app/components/events/interfaces/spans/spanDetail.tsx index 0e76a174e480cf..e583fbc326afda 100644 --- a/static/app/components/events/interfaces/spans/spanDetail.tsx +++ b/static/app/components/events/interfaces/spans/spanDetail.tsx @@ -69,6 +69,19 @@ const SIZE_DATA_KEYS = [ 'http.response_transfer_size', ]; +const HIDDEN_DATA_KEYS = [ + 'http.request.redirect_start', + 'http.request.fetch_start', + 'http.request.domain_lookup_start', + 'http.request.domain_lookup_end', + 'http.request.connect_start', + 'http.request.secure_connection_start', + 'http.request.connection_end', + 'http.request.request_start', + 'http.request.response_start', + 'http.request.response_end', +]; + type TransactionResult = { id: string; 'project.name': string; @@ -88,6 +101,10 @@ type Props = { trace: Readonly; }; +function isSpanKeyVisible(key: string) { + return !HIDDEN_DATA_KEYS.includes(key); +} + function SpanDetail(props: Props) { const [errorsOpened, setErrorsOpened] = useState(false); const location = useLocation(); @@ -390,7 +407,7 @@ function SpanDetail(props: Props) { const durationString = `${Number(duration.toFixed(3)).toLocaleString()}ms`; const unknownKeys = Object.keys(span).filter(key => { - return !rawSpanKeys.has(key as any); + return isSpanKeyVisible(key) && 
!rawSpanKeys.has(key as any); }); const {sizeKeys, nonSizeKeys} = partitionSizes(span?.data ?? {}); @@ -522,11 +539,13 @@ function SpanDetail(props: Props) { ))} - {map(nonSizeKeys, (value, key) => ( - - {maybeStringify(value)} - - ))} + {map(nonSizeKeys, (value, key) => + isSpanKeyVisible(key) ? ( + + {maybeStringify(value)} + + ) : null + )} {unknownKeys.map(key => ( {maybeStringify(span[key])} From c9e38b69c7237d0d34b3759dd1284e325d1ddcaf Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Mon, 17 Jul 2023 11:33:52 -0700 Subject: [PATCH 31/67] feat(replay): Render the replay currentTime & hoverTime inside the performance waterfall (#52890) Render a purple vertical bar to represent the current replay time. Also, when hovering, render another bar to represent the point in time where the mouse cursor is. SCR-20230714-jazb We're calling `useReplayContext` on each trace row. When there is no context provider the default values will be fetched. Mainly this include `replay === null` which will short-circuit everything and return null from the component. 
Fixes https://github.com/getsentry/sentry/issues/51938 --- .../components/performance/waterfall/row.tsx | 41 ++++++++++++++++++- .../traceDetails/transactionBar.tsx | 2 + 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/static/app/components/performance/waterfall/row.tsx b/static/app/components/performance/waterfall/row.tsx index 84f8f643ed3b58..17eb2f29857e97 100644 --- a/static/app/components/performance/waterfall/row.tsx +++ b/static/app/components/performance/waterfall/row.tsx @@ -1,7 +1,12 @@ +import {Fragment} from 'react'; import styled from '@emotion/styled'; import {ROW_HEIGHT} from 'sentry/components/performance/waterfall/constants'; -import {getBackgroundColor} from 'sentry/components/performance/waterfall/utils'; +import { + getBackgroundColor, + toPercent, +} from 'sentry/components/performance/waterfall/utils'; +import {useReplayContext} from 'sentry/components/replays/replayContext'; interface RowProps extends React.HTMLAttributes { cursor?: 'pointer' | 'default'; @@ -57,3 +62,37 @@ export const RowCell = styled('div')` display: flex; align-items: center; `; + +export function RowReplayTimeIndicators() { + const {currentTime, currentHoverTime, replay} = useReplayContext(); + const durationMs = replay?.getDurationMs(); + + if (!replay || !durationMs) { + return null; + } + + return ( + + + {currentHoverTime !== undefined ? 
( + + ) : null} + + ); +} + +const RowIndicatorBar = styled('div')` + background: ${p => p.theme.purple300}; + content: ''; + display: block; + height: 100%; + position: absolute; + top: 0; + transform: translateX(-50%); + width: 1px; + z-index: 1; +`; + +const RowHoverIndicatorBar = styled(RowIndicatorBar)` + background: ${p => p.theme.purple200}; +`; diff --git a/static/app/views/performance/traceDetails/transactionBar.tsx b/static/app/views/performance/traceDetails/transactionBar.tsx index b130ad0c0a02da..3ff6c6c24be163 100644 --- a/static/app/views/performance/traceDetails/transactionBar.tsx +++ b/static/app/views/performance/traceDetails/transactionBar.tsx @@ -19,6 +19,7 @@ import { Row, RowCell, RowCellContainer, + RowReplayTimeIndicators, } from 'sentry/components/performance/waterfall/row'; import {DurationPill, RowRectangle} from 'sentry/components/performance/waterfall/rowBar'; import { @@ -546,6 +547,7 @@ class TransactionBar extends Component { showDetail={showDetail} onClick={this.toggleDisplayDetail} > + {this.renderRectangle()} {this.renderMeasurements()} From 4889710c2b3783ad0766baafbae29426eaecaa5b Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 17 Jul 2023 14:34:04 -0400 Subject: [PATCH 32/67] ref: fix typing for sentry.models.__init__ (#52992) star imports were clobbering names with incompatible types --- pyproject.toml | 1 - src/sentry/api/endpoints/filechange.py | 3 ++- src/sentry/api/endpoints/organization_release_meta.py | 3 ++- src/sentry/api/endpoints/organization_users.py | 3 ++- src/sentry/api/serializers/models/filechange.py | 3 ++- .../models/organization_member/expand/projects.py | 3 ++- src/sentry/api/serializers/models/project.py | 2 +- src/sentry/api/serializers/models/team.py | 2 +- src/sentry/api/serializers/rest_framework/commit.py | 2 +- src/sentry/deletions/__init__.py | 3 ++- src/sentry/deletions/defaults/commit.py | 3 ++- src/sentry/deletions/defaults/project.py | 
3 ++- src/sentry/deletions/defaults/team.py | 2 +- .../discover/endpoints/discover_key_transactions.py | 3 ++- src/sentry/integrations/github/webhook.py | 10 ++-------- src/sentry/models/__init__.py | 4 ++-- src/sentry/models/project.py | 11 +++-------- src/sentry/models/release.py | 2 +- src/sentry/models/team.py | 5 +++-- .../notifications/notifications/activity/release.py | 3 ++- src/sentry/search/events/datasets/field_aliases.py | 2 +- src/sentry/search/events/fields.py | 3 ++- .../services/hybrid_cloud/organization/serial.py | 2 +- src/sentry/testutils/asserts.py | 3 ++- src/sentry/testutils/factories.py | 2 +- src/sentry/utils/committers.py | 3 ++- .../utils/suspect_resolutions/commit_correlation.py | 3 ++- src/sentry_plugins/github/webhooks/events/push.py | 3 ++- .../test_organization_release_commit_files.py | 3 ++- tests/sentry/api/endpoints/test_commit_filechange.py | 3 ++- .../api/endpoints/test_organization_release_meta.py | 2 +- .../api/endpoints/test_organization_releases.py | 2 +- tests/sentry/api/endpoints/test_organization_teams.py | 3 ++- tests/sentry/api/endpoints/test_project_details.py | 2 +- tests/sentry/api/endpoints/test_project_releases.py | 2 +- .../sentry/api/endpoints/test_project_team_details.py | 3 ++- .../sentry/api/serializers/test_commit_filechange.py | 3 ++- tests/sentry/deletions/test_team.py | 3 ++- tests/sentry/dynamic_sampling/test_generate_rules.py | 2 +- tests/sentry/manager/test_projectteam_manager.py | 3 ++- tests/sentry/models/test_commitfilechange.py | 3 ++- tests/sentry/models/test_project.py | 2 +- tests/sentry/models/test_team.py | 2 +- tests/sentry/models/test_teamkeytransaction.py | 2 +- tests/sentry/relay/test_config.py | 3 ++- tests/sentry/snuba/test_discover_query.py | 3 ++- tests/sentry/tasks/test_code_owners.py | 10 ++-------- tests/sentry/tasks/test_post_process.py | 2 +- .../suspect_resolutions/test_commit_correlation.py | 10 ++-------- tests/sentry/utils/test_committers.py | 11 ++--------- 
.../api/endpoints/test_discover_key_transactions.py | 2 +- tests/snuba/api/endpoints/test_organization_events.py | 3 ++- .../api/endpoints/test_organization_events_mep.py | 2 +- 53 files changed, 87 insertions(+), 91 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d8f99472ec0357..bfae3de90ebebe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -643,7 +643,6 @@ module = [ "sentry.migrations.0381_fix_org_slug_casing", "sentry.migrations.0407_recreate_perf_alert_subscriptions", "sentry.migrations.0418_add_actor_constraints", - "sentry.models", "sentry.models.actor", "sentry.models.apiapplication", "sentry.models.artifactbundle", diff --git a/src/sentry/api/endpoints/filechange.py b/src/sentry/api/endpoints/filechange.py index 5e06f018f70aed..f510c29bc7ac20 100644 --- a/src/sentry/api/endpoints/filechange.py +++ b/src/sentry/api/endpoints/filechange.py @@ -6,7 +6,8 @@ from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.paginator import OffsetPaginator from sentry.api.serializers import serialize -from sentry.models import CommitFileChange, Release, ReleaseCommit, Repository +from sentry.models import Release, ReleaseCommit, Repository +from sentry.models.commitfilechange import CommitFileChange @region_silo_endpoint diff --git a/src/sentry/api/endpoints/organization_release_meta.py b/src/sentry/api/endpoints/organization_release_meta.py index 01501908accefe..00c78b407bb6f7 100644 --- a/src/sentry/api/endpoints/organization_release_meta.py +++ b/src/sentry/api/endpoints/organization_release_meta.py @@ -7,7 +7,8 @@ from sentry.api.bases.organization import OrganizationReleasesBaseEndpoint from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.serializers.models.release import expose_version_info -from sentry.models import CommitFileChange, ProjectPlatform, Release, ReleaseCommit, ReleaseProject +from sentry.models import ProjectPlatform, Release, ReleaseCommit, ReleaseProject +from sentry.models.commitfilechange import 
CommitFileChange @region_silo_endpoint diff --git a/src/sentry/api/endpoints/organization_users.py b/src/sentry/api/endpoints/organization_users.py index 215bd1cb8abb5b..19c49a873eb1af 100644 --- a/src/sentry/api/endpoints/organization_users.py +++ b/src/sentry/api/endpoints/organization_users.py @@ -6,7 +6,8 @@ from sentry.api.bases.organization import OrganizationEndpoint from sentry.api.serializers import serialize from sentry.api.serializers.models import OrganizationMemberWithProjectsSerializer -from sentry.models import OrganizationMember, OrganizationMemberTeam, ProjectTeam +from sentry.models import OrganizationMember, OrganizationMemberTeam +from sentry.models.projectteam import ProjectTeam @region_silo_endpoint diff --git a/src/sentry/api/serializers/models/filechange.py b/src/sentry/api/serializers/models/filechange.py index 2766f3186a9edc..bd0517b35551a8 100644 --- a/src/sentry/api/serializers/models/filechange.py +++ b/src/sentry/api/serializers/models/filechange.py @@ -1,6 +1,7 @@ from sentry.api.serializers import Serializer, register from sentry.api.serializers.models.commit import get_users_for_commits -from sentry.models import Commit, CommitFileChange, Repository +from sentry.models import Commit, Repository +from sentry.models.commitfilechange import CommitFileChange @register(CommitFileChange) diff --git a/src/sentry/api/serializers/models/organization_member/expand/projects.py b/src/sentry/api/serializers/models/organization_member/expand/projects.py index 23c2eb38f48c9f..6e32d8e7899823 100644 --- a/src/sentry/api/serializers/models/organization_member/expand/projects.py +++ b/src/sentry/api/serializers/models/organization_member/expand/projects.py @@ -1,7 +1,8 @@ from collections import defaultdict from typing import Any, Mapping, MutableMapping, Sequence, cast -from sentry.models import OrganizationMember, OrganizationMemberTeam, ProjectTeam, TeamStatus, User +from sentry.models import OrganizationMember, OrganizationMemberTeam, TeamStatus, 
User +from sentry.models.projectteam import ProjectTeam from ..base import OrganizationMemberSerializer from ..response import OrganizationMemberWithProjectsResponse diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py index c4627d4792adcc..1c332dad87ee03 100644 --- a/src/sentry/api/serializers/models/project.py +++ b/src/sentry/api/serializers/models/project.py @@ -47,13 +47,13 @@ ProjectBookmark, ProjectOption, ProjectPlatform, - ProjectTeam, Release, Team, User, UserReport, ) from sentry.models.options.project_option import OPTION_KEYS +from sentry.models.projectteam import ProjectTeam from sentry.notifications.helpers import ( get_most_specific_notification_setting_value, transform_to_notification_settings_by_scope, diff --git a/src/sentry/api/serializers/models/team.py b/src/sentry/api/serializers/models/team.py index 86f08d3afbd5e5..a5e9e2705a22fa 100644 --- a/src/sentry/api/serializers/models/team.py +++ b/src/sentry/api/serializers/models/team.py @@ -38,11 +38,11 @@ OrganizationAccessRequest, OrganizationMember, OrganizationMemberTeam, - ProjectTeam, Team, TeamAvatar, User, ) +from sentry.models.projectteam import ProjectTeam from sentry.roles import organization_roles, team_roles from sentry.scim.endpoints.constants import SCIM_SCHEMA_GROUP from sentry.utils.query import RangeQuerySetWrapper diff --git a/src/sentry/api/serializers/rest_framework/commit.py b/src/sentry/api/serializers/rest_framework/commit.py index b1b9bd3f0701e2..56ec850413e5f2 100644 --- a/src/sentry/api/serializers/rest_framework/commit.py +++ b/src/sentry/api/serializers/rest_framework/commit.py @@ -1,7 +1,7 @@ from rest_framework import serializers from sentry.api.serializers.rest_framework.list import ListField -from sentry.models import CommitFileChange +from sentry.models.commitfilechange import CommitFileChange class CommitPatchSetSerializer(serializers.Serializer): diff --git a/src/sentry/deletions/__init__.py 
b/src/sentry/deletions/__init__.py index f47836dddc7381..f8ef02e09f0515 100644 --- a/src/sentry/deletions/__init__.py +++ b/src/sentry/deletions/__init__.py @@ -90,6 +90,7 @@ def load_defaults(): from sentry import models from sentry.discover.models import DiscoverSavedQuery from sentry.incidents.models import AlertRule + from sentry.models.commitfilechange import CommitFileChange from sentry.monitors import models as monitor_models from . import defaults @@ -102,7 +103,7 @@ def load_defaults(): default_manager.register(models.ApiToken, BulkModelDeletionTask) default_manager.register(models.Commit, defaults.CommitDeletionTask) default_manager.register(models.CommitAuthor, defaults.CommitAuthorDeletionTask) - default_manager.register(models.CommitFileChange, BulkModelDeletionTask) + default_manager.register(CommitFileChange, BulkModelDeletionTask) default_manager.register(models.Deploy, BulkModelDeletionTask) default_manager.register(DiscoverSavedQuery, defaults.DiscoverSavedQueryDeletionTask) default_manager.register(models.Distribution, BulkModelDeletionTask) diff --git a/src/sentry/deletions/defaults/commit.py b/src/sentry/deletions/defaults/commit.py index d33f86d62c3ad6..496ac29eeed0d3 100644 --- a/src/sentry/deletions/defaults/commit.py +++ b/src/sentry/deletions/defaults/commit.py @@ -3,7 +3,8 @@ class CommitDeletionTask(ModelDeletionTask): def get_child_relations(self, instance): - from sentry.models import CommitFileChange, ReleaseCommit, ReleaseHeadCommit + from sentry.models import ReleaseCommit, ReleaseHeadCommit + from sentry.models.commitfilechange import CommitFileChange return [ ModelRelation(CommitFileChange, {"commit_id": instance.id}), diff --git a/src/sentry/deletions/defaults/project.py b/src/sentry/deletions/defaults/project.py index acd4127ad942ba..3a9777e3ee301c 100644 --- a/src/sentry/deletions/defaults/project.py +++ b/src/sentry/deletions/defaults/project.py @@ -6,6 +6,7 @@ def get_child_relations(self, instance): from sentry import models 
from sentry.discover.models import DiscoverSavedQueryProject from sentry.incidents.models import AlertRule, IncidentProject + from sentry.models.projectteam import ProjectTeam from sentry.monitors.models import Monitor from sentry.replays.models import ReplayRecordingSegment from sentry.snuba.models import QuerySubscription @@ -34,7 +35,7 @@ def get_child_relations(self, instance): models.LatestAppConnectBuildsCheck, models.ProjectBookmark, models.ProjectKey, - models.ProjectTeam, + ProjectTeam, models.PromptsActivity, # order matters, ProjectCodeOwners to be deleted before RepositoryProjectPathConfig models.ProjectCodeOwners, diff --git a/src/sentry/deletions/defaults/team.py b/src/sentry/deletions/defaults/team.py index e015f5c7b0e75f..319936e77e90c6 100644 --- a/src/sentry/deletions/defaults/team.py +++ b/src/sentry/deletions/defaults/team.py @@ -3,7 +3,7 @@ class TeamDeletionTask(ModelDeletionTask): def get_child_relations(self, instance): - from sentry.models import ProjectTeam + from sentry.models.projectteam import ProjectTeam return [ ModelRelation(ProjectTeam, {"team_id": instance.id}), diff --git a/src/sentry/discover/endpoints/discover_key_transactions.py b/src/sentry/discover/endpoints/discover_key_transactions.py index 11db24d807eb83..40e0d1b578c5aa 100644 --- a/src/sentry/discover/endpoints/discover_key_transactions.py +++ b/src/sentry/discover/endpoints/discover_key_transactions.py @@ -14,7 +14,8 @@ from sentry.api.utils import InvalidParams from sentry.discover.endpoints import serializers from sentry.discover.models import TeamKeyTransaction -from sentry.models import ProjectTeam, Team +from sentry.models import Team +from sentry.models.projectteam import ProjectTeam class KeyTransactionPermission(OrganizationPermission): diff --git a/src/sentry/integrations/github/webhook.py b/src/sentry/integrations/github/webhook.py index a720aff1b43a8a..fd21e5c25ff87d 100644 --- a/src/sentry/integrations/github/webhook.py +++ 
b/src/sentry/integrations/github/webhook.py @@ -18,14 +18,8 @@ from sentry.api.base import Endpoint, region_silo_endpoint from sentry.constants import ObjectStatus from sentry.integrations.utils.scope import clear_tags_and_context -from sentry.models import ( - Commit, - CommitAuthor, - CommitFileChange, - Organization, - PullRequest, - Repository, -) +from sentry.models import Commit, CommitAuthor, Organization, PullRequest, Repository +from sentry.models.commitfilechange import CommitFileChange from sentry.services.hybrid_cloud.identity.service import identity_service from sentry.services.hybrid_cloud.integration.model import ( RpcIntegration, diff --git a/src/sentry/models/__init__.py b/src/sentry/models/__init__.py index d27d77b49c4d56..e24d65b9cc8e32 100644 --- a/src/sentry/models/__init__.py +++ b/src/sentry/models/__init__.py @@ -17,7 +17,7 @@ from .broadcast import * # NOQA from .commit import * # NOQA from .commitauthor import * # NOQA -from .commitfilechange import * # NOQA +from .commitfilechange import CommitFileChange # noqa from .counter import * # NOQA from .dashboard import * # NOQA from .dashboard_widget import * # NOQA @@ -82,7 +82,7 @@ from .projectownership import * # NOQA from .projectplatform import * # NOQA from .projectredirect import * # NOQA -from .projectteam import * # NOQA +from .projectteam import ProjectTeam # noqa from .promptsactivity import * # NOQA from .pullrequest import * # NOQA from .rawevent import * # NOQA diff --git a/src/sentry/models/project.py b/src/sentry/models/project.py index 4d2eab6ef79aa6..ad61dd077ee1e4 100644 --- a/src/sentry/models/project.py +++ b/src/sentry/models/project.py @@ -326,13 +326,13 @@ def transfer_to(self, organization): Environment, EnvironmentProject, ExternalIssue, - ProjectTeam, RegionScheduledDeletion, ReleaseProject, ReleaseProjectEnvironment, Rule, ) from sentry.models.actor import ACTOR_TYPES + from sentry.models.projectteam import ProjectTeam from sentry.monitors.models import Monitor 
old_org_id = self.organization_id @@ -492,13 +492,8 @@ def copy_settings_from(self, project_id): Returns True if the settings have successfully been copied over Returns False otherwise """ - from sentry.models import ( - EnvironmentProject, - ProjectOption, - ProjectOwnership, - ProjectTeam, - Rule, - ) + from sentry.models import EnvironmentProject, ProjectOption, ProjectOwnership, Rule + from sentry.models.projectteam import ProjectTeam model_list = [EnvironmentProject, ProjectOwnership, ProjectTeam, Rule] diff --git a/src/sentry/models/release.py b/src/sentry/models/release.py index 45499bcc0c7ebd..fe4559a2b5b010 100644 --- a/src/sentry/models/release.py +++ b/src/sentry/models/release.py @@ -37,11 +37,11 @@ from sentry.models import ( Activity, ArtifactBundle, - CommitFileChange, GroupInbox, GroupInboxRemoveAction, remove_group_from_inbox, ) +from sentry.models.commitfilechange import CommitFileChange from sentry.models.grouphistory import GroupHistoryStatus, record_group_history from sentry.signals import issue_resolved from sentry.tasks.relay import schedule_invalidate_project_config diff --git a/src/sentry/models/team.py b/src/sentry/models/team.py index a5dfa49c493b0e..c693aa6bcd2aa8 100644 --- a/src/sentry/models/team.py +++ b/src/sentry/models/team.py @@ -41,7 +41,8 @@ def get_for_user( Returns a list of all teams a user has some level of access to. 
""" from sentry.auth.superuser import is_active_superuser - from sentry.models import OrganizationMember, OrganizationMemberTeam, Project, ProjectTeam + from sentry.models import OrganizationMember, OrganizationMemberTeam, Project + from sentry.models.projectteam import ProjectTeam if not user.is_authenticated: return [] @@ -230,10 +231,10 @@ def transfer_to(self, organization): OrganizationMember, OrganizationMemberTeam, Project, - ProjectTeam, ReleaseProject, ReleaseProjectEnvironment, ) + from sentry.models.projectteam import ProjectTeam try: with transaction.atomic(): diff --git a/src/sentry/notifications/notifications/activity/release.py b/src/sentry/notifications/notifications/activity/release.py index eb3cf7b20fd763..7bb4d89617b306 100644 --- a/src/sentry/notifications/notifications/activity/release.py +++ b/src/sentry/notifications/notifications/activity/release.py @@ -4,7 +4,8 @@ from sentry_relay import parse_release -from sentry.models import Activity, Commit, CommitFileChange, OrganizationMember, Project +from sentry.models import Activity, Commit, OrganizationMember, Project +from sentry.models.commitfilechange import CommitFileChange from sentry.notifications.types import NotificationSettingTypes from sentry.notifications.utils import ( get_deploy, diff --git a/src/sentry/search/events/datasets/field_aliases.py b/src/sentry/search/events/datasets/field_aliases.py index 8c16deb52d7a5b..43d43b7f41d31d 100644 --- a/src/sentry/search/events/datasets/field_aliases.py +++ b/src/sentry/search/events/datasets/field_aliases.py @@ -7,7 +7,7 @@ from sentry.discover.models import TeamKeyTransaction from sentry.exceptions import IncompatibleMetricsQuery -from sentry.models import ProjectTeam +from sentry.models.projectteam import ProjectTeam from sentry.search.events import builder, constants, fields from sentry.search.events.types import SelectType from sentry.utils.numbers import format_grouped_length diff --git a/src/sentry/search/events/fields.py 
b/src/sentry/search/events/fields.py index 13bd76629e27df..8cf2809fae52de 100644 --- a/src/sentry/search/events/fields.py +++ b/src/sentry/search/events/fields.py @@ -10,7 +10,8 @@ from sentry.discover.models import TeamKeyTransaction from sentry.exceptions import IncompatibleMetricsQuery, InvalidSearchQuery -from sentry.models import ProjectTeam, ProjectTransactionThreshold +from sentry.models import ProjectTransactionThreshold +from sentry.models.projectteam import ProjectTeam from sentry.models.transaction_threshold import ( TRANSACTION_METRICS, ProjectTransactionThresholdOverride, diff --git a/src/sentry/services/hybrid_cloud/organization/serial.py b/src/sentry/services/hybrid_cloud/organization/serial.py index 568c19ce13145a..c227df4ffd7acf 100644 --- a/src/sentry/services/hybrid_cloud/organization/serial.py +++ b/src/sentry/services/hybrid_cloud/organization/serial.py @@ -9,10 +9,10 @@ OrganizationMember, OrganizationMemberTeam, Project, - ProjectTeam, Team, TeamStatus, ) +from sentry.models.projectteam import ProjectTeam from sentry.services.hybrid_cloud.organization import ( RpcOrganization, RpcOrganizationFlags, diff --git a/src/sentry/testutils/asserts.py b/src/sentry/testutils/asserts.py index a9e210257840a7..01c56b05ffb6d8 100644 --- a/src/sentry/testutils/asserts.py +++ b/src/sentry/testutils/asserts.py @@ -1,6 +1,7 @@ from typing import Optional -from sentry.models import AuditLogEntry, CommitFileChange +from sentry.models import AuditLogEntry +from sentry.models.commitfilechange import CommitFileChange def assert_mock_called_once_with_partial(mock, *args, **kwargs): diff --git a/src/sentry/testutils/factories.py b/src/sentry/testutils/factories.py index 87fcd0e90ff18b..0e0b1c23d01015 100644 --- a/src/sentry/testutils/factories.py +++ b/src/sentry/testutils/factories.py @@ -48,7 +48,6 @@ ArtifactBundle, Commit, CommitAuthor, - CommitFileChange, DocIntegration, DocIntegrationAvatar, Environment, @@ -93,6 +92,7 @@ ) from sentry.models.actor import 
get_actor_id_for_user from sentry.models.apikey import ApiKey +from sentry.models.commitfilechange import CommitFileChange from sentry.models.integrations.integration_feature import Feature, IntegrationTypes from sentry.models.notificationaction import ( ActionService, diff --git a/src/sentry/utils/committers.py b/src/sentry/utils/committers.py index f3c474f0dce9f4..676c8db400ea38 100644 --- a/src/sentry/utils/committers.py +++ b/src/sentry/utils/committers.py @@ -25,7 +25,8 @@ from sentry.api.serializers.models.commit import CommitSerializer, get_users_for_commits from sentry.api.serializers.models.release import Author from sentry.eventstore.models import Event -from sentry.models import Commit, CommitFileChange, Group, Project, Release, ReleaseCommit +from sentry.models import Commit, Group, Project, Release, ReleaseCommit +from sentry.models.commitfilechange import CommitFileChange from sentry.models.groupowner import GroupOwner, GroupOwnerType from sentry.services.hybrid_cloud.integration import integration_service from sentry.services.hybrid_cloud.user.service import user_service diff --git a/src/sentry/utils/suspect_resolutions/commit_correlation.py b/src/sentry/utils/suspect_resolutions/commit_correlation.py index e73478a118cbfd..3f489e56d58576 100644 --- a/src/sentry/utils/suspect_resolutions/commit_correlation.py +++ b/src/sentry/utils/suspect_resolutions/commit_correlation.py @@ -4,7 +4,8 @@ from datetime import datetime, timedelta from typing import Sequence, Set -from sentry.models import CommitFileChange, Group, GroupRelease, Release, ReleaseCommit +from sentry.models import Group, GroupRelease, Release, ReleaseCommit +from sentry.models.commitfilechange import CommitFileChange @dataclass diff --git a/src/sentry_plugins/github/webhooks/events/push.py b/src/sentry_plugins/github/webhooks/events/push.py index 6cc350488fb57e..773c3870720c0b 100644 --- a/src/sentry_plugins/github/webhooks/events/push.py +++ 
b/src/sentry_plugins/github/webhooks/events/push.py @@ -5,7 +5,8 @@ from django.http import Http404 from django.utils import timezone -from sentry.models import Commit, CommitAuthor, CommitFileChange, Integration, Repository +from sentry.models import Commit, CommitAuthor, Integration, Repository +from sentry.models.commitfilechange import CommitFileChange from sentry.plugins.providers import RepositoryProvider from sentry.services.hybrid_cloud import coerce_id_from from sentry.services.hybrid_cloud.user.service import user_service diff --git a/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py b/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py index 30a0e9fc15fb8c..bcb671849ea6da 100644 --- a/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py +++ b/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py @@ -2,7 +2,8 @@ from django.urls import reverse from fixtures.apidocs_test_case import APIDocsTestCase -from sentry.models import Commit, CommitFileChange, ReleaseCommit +from sentry.models import Commit, ReleaseCommit +from sentry.models.commitfilechange import CommitFileChange class CommitFileChangeDocsTest(APIDocsTestCase): diff --git a/tests/sentry/api/endpoints/test_commit_filechange.py b/tests/sentry/api/endpoints/test_commit_filechange.py index c7c9d9c6680e92..6c9d3e734173f7 100644 --- a/tests/sentry/api/endpoints/test_commit_filechange.py +++ b/tests/sentry/api/endpoints/test_commit_filechange.py @@ -1,6 +1,7 @@ from django.urls import reverse -from sentry.models import Commit, CommitFileChange, Release, ReleaseCommit, Repository +from sentry.models import Commit, Release, ReleaseCommit, Repository +from sentry.models.commitfilechange import CommitFileChange from sentry.testutils import APITestCase from sentry.testutils.silo import region_silo_test diff --git a/tests/sentry/api/endpoints/test_organization_release_meta.py 
b/tests/sentry/api/endpoints/test_organization_release_meta.py index d97b62bc97fc8e..809417075c5325 100644 --- a/tests/sentry/api/endpoints/test_organization_release_meta.py +++ b/tests/sentry/api/endpoints/test_organization_release_meta.py @@ -2,7 +2,6 @@ from sentry.models import ( Commit, - CommitFileChange, File, ProjectArtifactBundle, Release, @@ -11,6 +10,7 @@ ReleaseFile, Repository, ) +from sentry.models.commitfilechange import CommitFileChange from sentry.testutils import APITestCase from sentry.testutils.silo import region_silo_test from sentry.utils import json diff --git a/tests/sentry/api/endpoints/test_organization_releases.py b/tests/sentry/api/endpoints/test_organization_releases.py index 004aa0bbda4aff..fc2238cf02cc5a 100644 --- a/tests/sentry/api/endpoints/test_organization_releases.py +++ b/tests/sentry/api/endpoints/test_organization_releases.py @@ -21,7 +21,6 @@ ApiToken, Commit, CommitAuthor, - CommitFileChange, Environment, Release, ReleaseCommit, @@ -31,6 +30,7 @@ ReleaseStages, Repository, ) +from sentry.models.commitfilechange import CommitFileChange from sentry.models.orgauthtoken import OrgAuthToken from sentry.plugins.providers.dummy.repository import DummyRepositoryProvider from sentry.search.events.constants import ( diff --git a/tests/sentry/api/endpoints/test_organization_teams.py b/tests/sentry/api/endpoints/test_organization_teams.py index 15356e0247f340..3cca6d02e854ef 100644 --- a/tests/sentry/api/endpoints/test_organization_teams.py +++ b/tests/sentry/api/endpoints/test_organization_teams.py @@ -2,7 +2,8 @@ from django.urls import reverse -from sentry.models import OrganizationMember, OrganizationMemberTeam, ProjectTeam, Team +from sentry.models import OrganizationMember, OrganizationMemberTeam, Team +from sentry.models.projectteam import ProjectTeam from sentry.testutils import APITestCase from sentry.testutils.silo import region_silo_test from sentry.types.integrations import get_provider_string diff --git 
a/tests/sentry/api/endpoints/test_project_details.py b/tests/sentry/api/endpoints/test_project_details.py index 76245169faab22..db85356f612074 100644 --- a/tests/sentry/api/endpoints/test_project_details.py +++ b/tests/sentry/api/endpoints/test_project_details.py @@ -23,10 +23,10 @@ ProjectBookmark, ProjectOwnership, ProjectRedirect, - ProjectTeam, Rule, ScheduledDeletion, ) +from sentry.models.projectteam import ProjectTeam from sentry.notifications.types import NotificationSettingOptionValues, NotificationSettingTypes from sentry.silo import unguarded_write from sentry.testutils import APITestCase diff --git a/tests/sentry/api/endpoints/test_project_releases.py b/tests/sentry/api/endpoints/test_project_releases.py index fde576a9fc6a11..2e11e792ca8938 100644 --- a/tests/sentry/api/endpoints/test_project_releases.py +++ b/tests/sentry/api/endpoints/test_project_releases.py @@ -10,7 +10,6 @@ from sentry.constants import BAD_RELEASE_CHARS, MAX_VERSION_LENGTH from sentry.models import ( CommitAuthor, - CommitFileChange, Environment, Release, ReleaseCommit, @@ -18,6 +17,7 @@ ReleaseProjectEnvironment, Repository, ) +from sentry.models.commitfilechange import CommitFileChange from sentry.models.orgauthtoken import OrgAuthToken from sentry.silo import SiloMode from sentry.testutils import APITestCase, ReleaseCommitPatchTest, TestCase diff --git a/tests/sentry/api/endpoints/test_project_team_details.py b/tests/sentry/api/endpoints/test_project_team_details.py index f0b6e6604abd4f..1670ac0468d318 100644 --- a/tests/sentry/api/endpoints/test_project_team_details.py +++ b/tests/sentry/api/endpoints/test_project_team_details.py @@ -1,6 +1,7 @@ from rest_framework import status -from sentry.models import ProjectTeam, Rule +from sentry.models import Rule +from sentry.models.projectteam import ProjectTeam from sentry.testutils import APITestCase from sentry.testutils.helpers import with_feature from sentry.testutils.silo import region_silo_test diff --git 
a/tests/sentry/api/serializers/test_commit_filechange.py b/tests/sentry/api/serializers/test_commit_filechange.py index 2d9a048e1ee4f1..4fbaf92239800c 100644 --- a/tests/sentry/api/serializers/test_commit_filechange.py +++ b/tests/sentry/api/serializers/test_commit_filechange.py @@ -1,7 +1,8 @@ from uuid import uuid4 from sentry.api.serializers import serialize -from sentry.models import Commit, CommitAuthor, CommitFileChange, Release, ReleaseCommit, Repository +from sentry.models import Commit, CommitAuthor, Release, ReleaseCommit, Repository +from sentry.models.commitfilechange import CommitFileChange from sentry.testutils import TestCase diff --git a/tests/sentry/deletions/test_team.py b/tests/sentry/deletions/test_team.py index 583ddaa0ea03d5..7e3d0e78079736 100644 --- a/tests/sentry/deletions/test_team.py +++ b/tests/sentry/deletions/test_team.py @@ -1,4 +1,5 @@ -from sentry.models import Project, ProjectTeam, Rule, ScheduledDeletion, Team +from sentry.models import Project, Rule, ScheduledDeletion, Team +from sentry.models.projectteam import ProjectTeam from sentry.tasks.deletion.scheduled import run_deletion from sentry.testutils import TestCase from sentry.testutils.silo import region_silo_test diff --git a/tests/sentry/dynamic_sampling/test_generate_rules.py b/tests/sentry/dynamic_sampling/test_generate_rules.py index 7b527b77ca2afe..a9e8531136d8c1 100644 --- a/tests/sentry/dynamic_sampling/test_generate_rules.py +++ b/tests/sentry/dynamic_sampling/test_generate_rules.py @@ -17,7 +17,7 @@ RESERVED_IDS, RuleType, ) -from sentry.models import ProjectTeam +from sentry.models.projectteam import ProjectTeam from sentry.testutils.factories import Factories from sentry.testutils.helpers import Feature from sentry.utils import json diff --git a/tests/sentry/manager/test_projectteam_manager.py b/tests/sentry/manager/test_projectteam_manager.py index b39acc45713a45..04b0e05cc98947 100644 --- a/tests/sentry/manager/test_projectteam_manager.py +++ 
b/tests/sentry/manager/test_projectteam_manager.py @@ -1,4 +1,5 @@ -from sentry.models import ProjectTeam, Team, User +from sentry.models import Team, User +from sentry.models.projectteam import ProjectTeam from sentry.testutils import TestCase from sentry.testutils.silo import region_silo_test diff --git a/tests/sentry/models/test_commitfilechange.py b/tests/sentry/models/test_commitfilechange.py index 6db584b3facf35..c7ec8056424af5 100644 --- a/tests/sentry/models/test_commitfilechange.py +++ b/tests/sentry/models/test_commitfilechange.py @@ -1,4 +1,5 @@ -from sentry.models import Commit, CommitFileChange, Repository +from sentry.models import Commit, Repository +from sentry.models.commitfilechange import CommitFileChange from sentry.testutils import TestCase from sentry.testutils.silo import region_silo_test diff --git a/tests/sentry/models/test_project.py b/tests/sentry/models/test_project.py index 914cba8284a006..072a6883a4b25c 100644 --- a/tests/sentry/models/test_project.py +++ b/tests/sentry/models/test_project.py @@ -11,7 +11,6 @@ OrganizationMemberTeam, Project, ProjectOwnership, - ProjectTeam, RegionScheduledDeletion, Release, ReleaseProject, @@ -21,6 +20,7 @@ UserOption, ) from sentry.models.actor import get_actor_for_user +from sentry.models.projectteam import ProjectTeam from sentry.monitors.models import Monitor, MonitorType, ScheduleType from sentry.notifications.types import NotificationSettingOptionValues, NotificationSettingTypes from sentry.services.hybrid_cloud.actor import RpcActor diff --git a/tests/sentry/models/test_team.py b/tests/sentry/models/test_team.py index 4c8eae1705b1e4..39d08e2f9d9d40 100644 --- a/tests/sentry/models/test_team.py +++ b/tests/sentry/models/test_team.py @@ -4,13 +4,13 @@ OrganizationMember, OrganizationMemberTeam, Project, - ProjectTeam, Release, ReleaseProject, ReleaseProjectEnvironment, Team, ) from sentry.models.notificationsetting import NotificationSetting +from sentry.models.projectteam import ProjectTeam from 
sentry.notifications.types import NotificationSettingOptionValues, NotificationSettingTypes from sentry.tasks.deletion.hybrid_cloud import schedule_hybrid_cloud_foreign_key_jobs from sentry.testutils import TestCase diff --git a/tests/sentry/models/test_teamkeytransaction.py b/tests/sentry/models/test_teamkeytransaction.py index 8e77f728f9dc8a..7a4586b52d5aa2 100644 --- a/tests/sentry/models/test_teamkeytransaction.py +++ b/tests/sentry/models/test_teamkeytransaction.py @@ -2,7 +2,7 @@ from unittest.mock import patch from sentry.discover.models import TeamKeyTransaction, TeamKeyTransactionModelManager -from sentry.models import ProjectTeam +from sentry.models.projectteam import ProjectTeam from sentry.signals import receivers_raise_on_send from sentry.testutils import TransactionTestCase from sentry.testutils.helpers import Feature diff --git a/tests/sentry/relay/test_config.py b/tests/sentry/relay/test_config.py index 60e7f84bdea0c4..ad51d2cd9221c1 100644 --- a/tests/sentry/relay/test_config.py +++ b/tests/sentry/relay/test_config.py @@ -18,7 +18,8 @@ get_redis_client_for_ds, ) from sentry.dynamic_sampling.rules.base import NEW_MODEL_THRESHOLD_IN_MINUTES -from sentry.models import ProjectKey, ProjectTeam +from sentry.models import ProjectKey +from sentry.models.projectteam import ProjectTeam from sentry.models.transaction_threshold import TransactionMetric from sentry.relay.config import ProjectConfig, get_project_config from sentry.snuba.dataset import Dataset diff --git a/tests/sentry/snuba/test_discover_query.py b/tests/sentry/snuba/test_discover_query.py index 017fc078853f5a..8080977e1c2d31 100644 --- a/tests/sentry/snuba/test_discover_query.py +++ b/tests/sentry/snuba/test_discover_query.py @@ -6,7 +6,8 @@ from sentry.discover.arithmetic import ArithmeticValidationError from sentry.discover.models import TeamKeyTransaction from sentry.exceptions import InvalidSearchQuery -from sentry.models import ProjectTeam, ProjectTransactionThreshold, ReleaseStages +from 
sentry.models import ProjectTransactionThreshold, ReleaseStages +from sentry.models.projectteam import ProjectTeam from sentry.models.transaction_threshold import ( ProjectTransactionThresholdOverride, TransactionMetric, diff --git a/tests/sentry/tasks/test_code_owners.py b/tests/sentry/tasks/test_code_owners.py index 7af8eacfdf24d7..420c3accfbfeb0 100644 --- a/tests/sentry/tasks/test_code_owners.py +++ b/tests/sentry/tasks/test_code_owners.py @@ -1,13 +1,7 @@ from unittest.mock import patch -from sentry.models import ( - Commit, - CommitFileChange, - ExternalActor, - ProjectCodeOwners, - ProjectOwnership, - Repository, -) +from sentry.models import Commit, ExternalActor, ProjectCodeOwners, ProjectOwnership, Repository +from sentry.models.commitfilechange import CommitFileChange from sentry.tasks.codeowners import code_owners_auto_sync, update_code_owners_schema from sentry.testutils import TestCase diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index 276db8b3694b7c..1fec625587ee35 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -32,7 +32,6 @@ GroupStatus, Integration, ProjectOwnership, - ProjectTeam, ) from sentry.models.activity import ActivityIntegration from sentry.models.groupowner import ( @@ -41,6 +40,7 @@ ISSUE_OWNERS_DEBOUNCE_DURATION, ISSUE_OWNERS_DEBOUNCE_KEY, ) +from sentry.models.projectteam import ProjectTeam from sentry.ownership.grammar import Matcher, Owner, Rule, dump_schema from sentry.rules import init_registry from sentry.services.hybrid_cloud.user.service import user_service diff --git a/tests/sentry/utils/suspect_resolutions/test_commit_correlation.py b/tests/sentry/utils/suspect_resolutions/test_commit_correlation.py index 4bb08515738b8a..d4a9be7153c5c8 100644 --- a/tests/sentry/utils/suspect_resolutions/test_commit_correlation.py +++ b/tests/sentry/utils/suspect_resolutions/test_commit_correlation.py @@ -2,14 +2,8 @@ from django.utils 
import timezone -from sentry.models import ( - Activity, - Commit, - CommitFileChange, - GroupRelease, - GroupStatus, - ReleaseCommit, -) +from sentry.models import Activity, Commit, GroupRelease, GroupStatus, ReleaseCommit +from sentry.models.commitfilechange import CommitFileChange from sentry.testutils import TestCase from sentry.testutils.silo import region_silo_test from sentry.types.activity import ActivityType diff --git a/tests/sentry/utils/test_committers.py b/tests/sentry/utils/test_committers.py index 919a67e5478810..52176dee7c59f9 100644 --- a/tests/sentry/utils/test_committers.py +++ b/tests/sentry/utils/test_committers.py @@ -7,15 +7,8 @@ from django.utils import timezone from sentry.integrations.github.integration import GitHubIntegration -from sentry.models import ( - Commit, - CommitAuthor, - CommitFileChange, - GroupRelease, - Release, - ReleaseCommit, - Repository, -) +from sentry.models import Commit, CommitAuthor, GroupRelease, Release, ReleaseCommit, Repository +from sentry.models.commitfilechange import CommitFileChange from sentry.models.groupowner import GroupOwner, GroupOwnerType from sentry.models.integrations.integration import Integration from sentry.testutils import TestCase diff --git a/tests/snuba/api/endpoints/test_discover_key_transactions.py b/tests/snuba/api/endpoints/test_discover_key_transactions.py index af1045a37ccaa2..7b719213bb8120 100644 --- a/tests/snuba/api/endpoints/test_discover_key_transactions.py +++ b/tests/snuba/api/endpoints/test_discover_key_transactions.py @@ -1,7 +1,7 @@ from django.urls import reverse from sentry.discover.models import MAX_TEAM_KEY_TRANSACTIONS, TeamKeyTransaction -from sentry.models import ProjectTeam +from sentry.models.projectteam import ProjectTeam from sentry.testutils import APITestCase, SnubaTestCase from sentry.testutils.helpers import parse_link_header from sentry.testutils.silo import region_silo_test diff --git a/tests/snuba/api/endpoints/test_organization_events.py 
b/tests/snuba/api/endpoints/test_organization_events.py index 0e13f6f3c3ccb7..87101fae6dd464 100644 --- a/tests/snuba/api/endpoints/test_organization_events.py +++ b/tests/snuba/api/endpoints/test_organization_events.py @@ -15,7 +15,8 @@ from sentry.discover.models import TeamKeyTransaction from sentry.issues.grouptype import ProfileFileIOGroupType -from sentry.models import ApiKey, ProjectTeam, ProjectTransactionThreshold, ReleaseStages +from sentry.models import ApiKey, ProjectTransactionThreshold, ReleaseStages +from sentry.models.projectteam import ProjectTeam from sentry.models.transaction_threshold import ( ProjectTransactionThresholdOverride, TransactionMetric, diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py index 8e91c974a01807..b21ebec27b33d9 100644 --- a/tests/snuba/api/endpoints/test_organization_events_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_mep.py @@ -5,7 +5,7 @@ from sentry.api.bases.organization_events import DATASET_OPTIONS from sentry.discover.models import TeamKeyTransaction -from sentry.models import ProjectTeam +from sentry.models.projectteam import ProjectTeam from sentry.models.transaction_threshold import ( ProjectTransactionThreshold, ProjectTransactionThresholdOverride, From 36a2de642efbf1043592281d4b3605181a649449 Mon Sep 17 00:00:00 2001 From: Spencer Murray <62224025+spalmurray@users.noreply.github.com> Date: Mon, 17 Jul 2023 18:50:43 +0000 Subject: [PATCH 33/67] fix(discord): Change log level from error to info for unauthorized interactions (#52946) Receiving unauthorized interactions should not cause an issue in Sentry.
Fixes SENTRY-13A0 --- src/sentry/integrations/discord/requests/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/integrations/discord/requests/base.py b/src/sentry/integrations/discord/requests/base.py index 6939832d252644..c7562ba192d603 100644 --- a/src/sentry/integrations/discord/requests/base.py +++ b/src/sentry/integrations/discord/requests/base.py @@ -85,7 +85,7 @@ def authorize(self) -> None: if signature and timestamp and verify_signature(public_key, signature, timestamp + body): return - self._error("discord.interactions.auth") + self._info("discord.interactions.auth") raise DiscordRequestError(status=status.HTTP_401_UNAUTHORIZED) def _validate_data(self) -> None: From 25c3cdd8c8e7db958c3ab3165b78ab90b2cc8a49 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 17 Jul 2023 11:54:09 -0700 Subject: [PATCH 34/67] fix(releases): Use query parameter when switching filters (#52937) --- .../views/releases/detail/overview/index.tsx | 1 - ...Issues.spec.jsx => releaseIssues.spec.tsx} | 77 +++++++++----- .../detail/overview/releaseIssues.tsx | 100 +++++++----------- 3 files changed, 87 insertions(+), 91 deletions(-) rename static/app/views/releases/detail/overview/{releaseIssues.spec.jsx => releaseIssues.spec.tsx} (82%) diff --git a/static/app/views/releases/detail/overview/index.tsx b/static/app/views/releases/detail/overview/index.tsx index b3f11f5cb88e12..251c7d77a44503 100644 --- a/static/app/views/releases/detail/overview/index.tsx +++ b/static/app/views/releases/detail/overview/index.tsx @@ -544,7 +544,6 @@ class ReleaseOverview extends DeprecatedAsyncView { ); + const {rerender} = render(); expect(await screen.findByText('No new issues in this release.')).toBeInTheDocument(); await userEvent.click(screen.getByRole('radio', {name: 'Resolved 0'})); + // Simulate query change + rerender( + + ); expect( await screen.findByText('No resolved issues in this release.') ).toBeInTheDocument(); }); it('shows an empty sttate with 
stats period', async function () { - render(); + const query = {pageStatsPeriod: '24h'}; + const {rerender} = render( + + ); expect( await screen.findByText( @@ -85,6 +94,13 @@ describe('ReleaseIssues', function () { ).toBeInTheDocument(); await userEvent.click(screen.getByRole('radio', {name: 'Unhandled 0'})); + // Simulate query change + rerender( + + ); expect( await screen.findByText( textWithMarkupMatcher('No unhandled issues for the last 24 hours.') @@ -92,54 +108,61 @@ describe('ReleaseIssues', function () { ).toBeInTheDocument(); }); - it('filters the issues', async function () { - render(); - - expect(screen.getAllByRole('radio')).toHaveLength(5); - await screen.findByRole('radio', {name: 'New Issues 0'}); - - await userEvent.click(screen.getByRole('radio', {name: 'New Issues 0'})); - expect(newIssuesEndpoint).toHaveBeenCalledTimes(1); - - await userEvent.click(screen.getByRole('radio', {name: 'Resolved 0'})); - expect(resolvedIssuesEndpoint).toHaveBeenCalledTimes(1); - - await userEvent.click(screen.getByRole('radio', {name: 'Unhandled 0'})); - expect(unhandledIssuesEndpoint).toHaveBeenCalledTimes(1); - - await userEvent.click(screen.getByRole('radio', {name: 'All Issues 0'})); - expect(allIssuesEndpoint).toHaveBeenCalledTimes(1); - }); - - it('renders link to Issues', async function () { + it('can switch issue filters', async function () { const {routerContext} = initializeOrg(); - render(, {context: routerContext}); + const {rerender} = render(, {context: routerContext}); + // New + expect(await screen.findByRole('radio', {name: 'New Issues 0'})).toBeChecked(); expect(screen.getByRole('button', {name: 'Open in Issues'})).toHaveAttribute( 'href', '/organizations/org-slug/issues/?end=2020-03-24T02%3A04%3A59Z&groupStatsPeriod=auto&query=firstRelease%3A1.0.0&sort=freq&start=2020-03-23T01%3A02%3A00Z' ); + expect(newIssuesEndpoint).toHaveBeenCalledTimes(1); - await screen.findByRole('radio', {name: 'Resolved 0'}); - + // Resolved await 
userEvent.click(screen.getByRole('radio', {name: 'Resolved 0'})); + // Simulate query change + rerender( + + ); expect(screen.getByRole('button', {name: 'Open in Issues'})).toHaveAttribute( 'href', '/organizations/org-slug/issues/?end=2020-03-24T02%3A04%3A59Z&groupStatsPeriod=auto&query=release%3A1.0.0&sort=freq&start=2020-03-23T01%3A02%3A00Z' ); + expect(resolvedIssuesEndpoint).toHaveBeenCalledTimes(1); + // Unhandled await userEvent.click(screen.getByRole('radio', {name: 'Unhandled 0'})); + rerender( + + ); expect(screen.getByRole('button', {name: 'Open in Issues'})).toHaveAttribute( 'href', '/organizations/org-slug/issues/?end=2020-03-24T02%3A04%3A59Z&groupStatsPeriod=auto&query=release%3A1.0.0%20error.handled%3A0&sort=freq&start=2020-03-23T01%3A02%3A00Z' ); + expect(unhandledIssuesEndpoint).toHaveBeenCalledTimes(1); + // All await userEvent.click(screen.getByRole('radio', {name: 'All Issues 0'})); + rerender( + + ); expect(screen.getByRole('button', {name: 'Open in Issues'})).toHaveAttribute( 'href', '/organizations/org-slug/issues/?end=2020-03-24T02%3A04%3A59Z&groupStatsPeriod=auto&query=release%3A1.0.0&sort=freq&start=2020-03-23T01%3A02%3A00Z' ); + expect(allIssuesEndpoint).toHaveBeenCalledTimes(1); }); it('includes release context when linking to issue', async function () { diff --git a/static/app/views/releases/detail/overview/releaseIssues.tsx b/static/app/views/releases/detail/overview/releaseIssues.tsx index 360b6a75b3a8e1..59d4412426c4aa 100644 --- a/static/app/views/releases/detail/overview/releaseIssues.tsx +++ b/static/app/views/releases/detail/overview/releaseIssues.tsx @@ -16,7 +16,7 @@ import {SegmentedControl} from 'sentry/components/segmentedControl'; import {DEFAULT_RELATIVE_PERIODS} from 'sentry/constants'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import {Organization, PageFilters} from 'sentry/types'; +import {Organization} from 'sentry/types'; import {MutableSearch} from 'sentry/utils/tokenizeSearch'; 
import withApi from 'sentry/utils/withApi'; import withOrganization from 'sentry/utils/withOrganization'; @@ -33,13 +33,13 @@ enum IssuesType { ALL = 'all', } -enum IssuesQuery { - NEW = 'first-release', - UNHANDLED = 'error.handled:0', - REGRESSED = 'regressed_in_release', - RESOLVED = 'is:resolved', - ALL = 'release', -} +const issuesQuery: Record = { + [IssuesType.NEW]: 'first-release', + [IssuesType.UNHANDLED]: 'error.handled:0', + [IssuesType.REGRESSED]: 'regressed_in_release', + [IssuesType.RESOLVED]: 'is:resolved', + [IssuesType.ALL]: 'release', +}; type IssuesQueryParams = { limit: number; @@ -56,7 +56,6 @@ type Props = { location: Location; organization: Organization; releaseBounds: ReleaseBounds; - selection: PageFilters; version: string; queryFilterDescription?: string; } & Partial; @@ -69,7 +68,6 @@ type State = { resolved: number | null; unhandled: number | null; }; - issuesType: IssuesType; onCursor?: () => void; pageLinks?: string; }; @@ -79,24 +77,7 @@ class ReleaseIssues extends Component { state: State = this.getInitialState(); getInitialState() { - const {location} = this.props; - const query = location.query ? location.query.issuesType : null; - const issuesTypeState = !query - ? IssuesType.NEW - : query.includes(IssuesType.NEW) - ? IssuesType.NEW - : query.includes(IssuesType.UNHANDLED) - ? IssuesType.REGRESSED - : query.includes(IssuesType.REGRESSED) - ? IssuesType.UNHANDLED - : query.includes(IssuesType.RESOLVED) - ? IssuesType.RESOLVED - : query.includes(IssuesType.ALL) - ? IssuesType.ALL - : IssuesType.ALL; - return { - issuesType: issuesTypeState, count: { new: null, all: null, @@ -128,9 +109,16 @@ class ReleaseIssues extends Component { } } + getActiveIssuesType(): IssuesType { + const query = (this.props.location.query?.issuesType as string) ?? ''; + return Object.values(IssuesType).includes(query) + ? 
(query as IssuesType) + : IssuesType.NEW; + } + getIssuesUrl() { const {version, organization} = this.props; - const {issuesType} = this.state; + const issuesType = this.getActiveIssuesType(); const {queryParams} = this.getIssuesEndpoint(); const query = new MutableSearch([]); @@ -164,7 +152,7 @@ class ReleaseIssues extends Component { getIssuesEndpoint(): {path: string; queryParams: IssuesQueryParams} { const {version, organization, location, releaseBounds} = this.props; - const {issuesType} = this.state; + const issuesType = this.getActiveIssuesType(); const queryParams = { ...getReleaseParams({ @@ -183,7 +171,7 @@ class ReleaseIssues extends Component { queryParams: { ...queryParams, query: new MutableSearch([ - `${IssuesQuery.ALL}:${version}`, + `${issuesQuery.all}:${version}`, 'is:unresolved', ]).formatString(), }, @@ -201,8 +189,8 @@ class ReleaseIssues extends Component { queryParams: { ...queryParams, query: new MutableSearch([ - `${IssuesQuery.ALL}:${version}`, - IssuesQuery.UNHANDLED, + `${issuesQuery.all}:${version}`, + issuesQuery.unhandled, 'is:unresolved', ]).formatString(), }, @@ -213,7 +201,7 @@ class ReleaseIssues extends Component { queryParams: { ...queryParams, query: new MutableSearch([ - `${IssuesQuery.REGRESSED}:${version}`, + `${issuesQuery.regressed}:${version}`, ]).formatString(), }, }; @@ -224,7 +212,7 @@ class ReleaseIssues extends Component { queryParams: { ...queryParams, query: new MutableSearch([ - `${IssuesQuery.NEW}:${version}`, + `${issuesQuery.new}:${version}`, 'is:unresolved', ]).formatString(), }, @@ -246,14 +234,14 @@ class ReleaseIssues extends Component { ]).then(([issueResponse, resolvedResponse]) => { this.setState({ count: { - all: issueResponse[`${IssuesQuery.ALL}:"${version}" is:unresolved`] || 0, - new: issueResponse[`${IssuesQuery.NEW}:"${version}" is:unresolved`] || 0, + all: issueResponse[`${issuesQuery.all}:"${version}" is:unresolved`] || 0, + new: issueResponse[`${issuesQuery.new}:"${version}" is:unresolved`] || 
0, resolved: resolvedResponse.length, unhandled: issueResponse[ - `${IssuesQuery.UNHANDLED} ${IssuesQuery.ALL}:"${version}" is:unresolved` + `${issuesQuery.unhandled} ${issuesQuery.all}:"${version}" is:unresolved` ] || 0, - regressed: issueResponse[`${IssuesQuery.REGRESSED}:"${version}"`] || 0, + regressed: issueResponse[`${issuesQuery.regressed}:"${version}"`] || 0, }, }); }); @@ -267,10 +255,10 @@ class ReleaseIssues extends Component { const issuesCountPath = `/organizations/${organization.slug}/issues-count/`; const params = [ - `${IssuesQuery.NEW}:"${version}" is:unresolved`, - `${IssuesQuery.ALL}:"${version}" is:unresolved`, - `${IssuesQuery.UNHANDLED} ${IssuesQuery.ALL}:"${version}" is:unresolved`, - `${IssuesQuery.REGRESSED}:"${version}"`, + `${issuesQuery.new}:"${version}" is:unresolved`, + `${issuesQuery.all}:"${version}" is:unresolved`, + `${issuesQuery.unhandled} ${issuesQuery.all}:"${version}" is:unresolved`, + `${issuesQuery.regressed}:"${version}"`, ]; const queryParams = params.map(param => param); const queryParameters = { @@ -286,29 +274,14 @@ class ReleaseIssues extends Component { handleIssuesTypeSelection = (issuesType: IssuesType) => { const {location} = this.props; - const issuesTypeQuery = - issuesType === IssuesType.ALL - ? IssuesType.ALL - : issuesType === IssuesType.NEW - ? IssuesType.NEW - : issuesType === IssuesType.RESOLVED - ? IssuesType.RESOLVED - : issuesType === IssuesType.UNHANDLED - ? IssuesType.UNHANDLED - : issuesType === IssuesType.REGRESSED - ? 
IssuesType.REGRESSED - : ''; - - const to = { + + browserHistory.replace({ ...location, query: { ...location.query, - issuesType: issuesTypeQuery, + issuesType, }, - }; - - browserHistory.replace(to); - this.setState({issuesType}); + }); }; handleFetchSuccess = (groupListState, onCursor) => { @@ -317,7 +290,7 @@ class ReleaseIssues extends Component { renderEmptyMessage = () => { const {location, releaseBounds} = this.props; - const {issuesType} = this.state; + const issuesType = this.getActiveIssuesType(); const isEntireReleasePeriod = !location.query.pageStatsPeriod && !location.query.pageStart; @@ -367,7 +340,8 @@ class ReleaseIssues extends Component { }; render() { - const {issuesType, count, pageLinks, onCursor} = this.state; + const {count, pageLinks, onCursor} = this.state; + const issuesType = this.getActiveIssuesType(); const {organization, queryFilterDescription, withChart, version} = this.props; const {path, queryParams} = this.getIssuesEndpoint(); const issuesTypes = [ From 4ea79c8a11f5923bce8d95c9f39685614d0f35cd Mon Sep 17 00:00:00 2001 From: Gilbert Szeto Date: Mon, 17 Jul 2023 11:54:36 -0700 Subject: [PATCH 35/67] fix(group-attributes): log metric when post_save.send(update_fields=["status", "subs"]) is called for group (#52996) Was looking at the incorrect kwargs key when `post_save.send()` was called. 
--- src/sentry/issues/attributes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/issues/attributes.py b/src/sentry/issues/attributes.py index a4272057c72d0e..d521c29047be32 100644 --- a/src/sentry/issues/attributes.py +++ b/src/sentry/issues/attributes.py @@ -41,7 +41,7 @@ def post_save_log_group_attributes_changed(instance, sender, created, *args, **k if created: _log_group_attributes_changed(Operation.CREATED, "group", None) else: - if "updated_fields" in kwargs: + if "update_fields" in kwargs: update_fields = kwargs["update_fields"] # we have no guarantees update_fields is used everywhere save() is called # we'll need to assume any of the attributes are updated in that case From 8004e59cf49111ad7665157ff04beeb6d0da890a Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Mon, 17 Jul 2023 11:59:13 -0700 Subject: [PATCH 36/67] ref(replay): Refactor the "Next Breadcrumb" button to use *Frame types (#52931) This is the code powering the "Next breadcrumb" button: SCR-20230714-opsj I also clicked through a replay to verify the the timestamp is being updated, and the video is progressing. 
--- .../components/replays/replayController.tsx | 17 +++-- .../app/utils/replays/getReplayEvent.spec.tsx | 69 ++++++++++--------- static/app/utils/replays/getReplayEvent.tsx | 21 +++--- static/app/utils/replays/replayReader.tsx | 28 -------- 4 files changed, 55 insertions(+), 80 deletions(-) diff --git a/static/app/components/replays/replayController.tsx b/static/app/components/replays/replayController.tsx index 9c7d9596d85492..87593aefbd687a 100644 --- a/static/app/components/replays/replayController.tsx +++ b/static/app/components/replays/replayController.tsx @@ -8,7 +8,7 @@ import {CompositeSelect} from 'sentry/components/compactSelect/composite'; import {PlayerScrubber} from 'sentry/components/replays/player/scrubber'; import useScrubberMouseTracking from 'sentry/components/replays/player/useScrubberMouseTracking'; import {useReplayContext} from 'sentry/components/replays/replayContext'; -import {formatTime, relativeTimeInMs} from 'sentry/components/replays/utils'; +import {formatTime} from 'sentry/components/replays/utils'; import { IconContract, IconExpand, @@ -24,7 +24,7 @@ import ConfigStore from 'sentry/stores/configStore'; import {useLegacyStore} from 'sentry/stores/useLegacyStore'; import {space} from 'sentry/styles/space'; import {trackAnalytics} from 'sentry/utils/analytics'; -import {getNextReplayEvent} from 'sentry/utils/replays/getReplayEvent'; +import {getNextReplayFrame} from 'sentry/utils/replays/getReplayEvent'; import useFullscreen from 'sentry/utils/replays/hooks/useFullscreen'; import useOrganization from 'sentry/utils/useOrganization'; @@ -79,17 +79,16 @@ function ReplayPlayPauseBar() { title={t('Next breadcrumb')} icon={} onClick={() => { - const startTimestampMs = replay?.getReplay().started_at?.getTime(); - if (!startTimestampMs) { + if (!replay) { return; } - const next = getNextReplayEvent({ - items: replay?.getUserActionCrumbs() || [], - targetTimestampMs: startTimestampMs + currentTime, + const next = getNextReplayFrame({ + frames: 
replay.getChapterFrames(), + targetOffsetMs: currentTime, }); - if (startTimestampMs !== undefined && next?.timestamp) { - setCurrentTime(relativeTimeInMs(next.timestamp, startTimestampMs)); + if (next) { + setCurrentTime(next.offsetMs); } }} aria-label={t('Fast-forward to next breadcrumb')} diff --git a/static/app/utils/replays/getReplayEvent.spec.tsx b/static/app/utils/replays/getReplayEvent.spec.tsx index ae269d630c9b14..2840bff45beca8 100644 --- a/static/app/utils/replays/getReplayEvent.spec.tsx +++ b/static/app/utils/replays/getReplayEvent.spec.tsx @@ -1,8 +1,9 @@ import {BreadcrumbLevelType, BreadcrumbType, Crumb} from 'sentry/types/breadcrumbs'; import { - getNextReplayEvent, + getNextReplayFrame, getPrevReplayEvent, } from 'sentry/utils/replays/getReplayEvent'; +import hydrateBreadcrumbs from 'sentry/utils/replays/hydrateBreadcrumbs'; const START_TIMESTAMP_SEC = 1651693622.951; const CURRENT_TIME_MS = 15000; @@ -73,67 +74,71 @@ function createCrumbs(): Crumb[] { ]; } -describe('getNextReplayEvent', () => { +describe('getNextReplayFrame', () => { + const frames = hydrateBreadcrumbs(TestStubs.ReplayRecord(), [ + TestStubs.Replay.ClickFrame({timestamp: new Date('2022-05-11T22:41:32.002Z')}), + TestStubs.Replay.ClickFrame({timestamp: new Date('2022-05-04T19:47:08.085000Z')}), + TestStubs.Replay.ClickFrame({timestamp: new Date('2022-05-04T19:47:11.086000Z')}), + TestStubs.Replay.ClickFrame({timestamp: new Date('2022-05-04T19:47:52.915000Z')}), + TestStubs.Replay.ClickFrame({timestamp: new Date('2022-05-04T19:47:59.915000Z')}), + ]); + + TestStubs.Replay.ClickEvent; it('should return the next crumb', () => { - const crumbs = createCrumbs(); - const results = getNextReplayEvent({ - items: crumbs, - targetTimestampMs: START_TIMESTAMP_SEC * 1000 + CURRENT_TIME_MS, + const results = getNextReplayFrame({ + frames, + targetOffsetMs: CURRENT_TIME_MS, }); - expect(results?.id).toEqual(20); + expect(results).toEqual(frames[1]); }); it('should return the next crumb when 
the the list is not sorted', () => { - const [one, two, three, four, five] = createCrumbs(); - const results = getNextReplayEvent({ - items: [one, four, five, three, two], - targetTimestampMs: START_TIMESTAMP_SEC * 1000 + CURRENT_TIME_MS, + const [one, two, three, four, five] = frames; + const results = getNextReplayFrame({ + frames: [one, four, five, three, two], + targetOffsetMs: CURRENT_TIME_MS, }); - expect(results?.id).toEqual(20); + expect(results).toEqual(frames[1]); }); it('should return undefined when there are no crumbs', () => { - const crumbs = []; - const results = getNextReplayEvent({ - items: crumbs, - targetTimestampMs: START_TIMESTAMP_SEC * 1000 + CURRENT_TIME_MS, + const results = getNextReplayFrame({ + frames: [], + targetOffsetMs: CURRENT_TIME_MS, }); expect(results).toBeUndefined(); }); it('should return undefined when the timestamp is later than any crumbs', () => { - const crumbs = createCrumbs(); - const results = getNextReplayEvent({ - items: crumbs, - targetTimestampMs: START_TIMESTAMP_SEC * 1000 + 99999999999, + const results = getNextReplayFrame({ + frames, + targetOffsetMs: 99999999999, }); expect(results).toBeUndefined(); }); it('should return the crumb after when a timestamp exactly matches', () => { - const crumbs = createCrumbs(); - const exactCrumbTime = 8135; - const results = getNextReplayEvent({ - items: crumbs, - targetTimestampMs: START_TIMESTAMP_SEC * 1000 + exactCrumbTime, + const exactTime = 8135; + const results = getNextReplayFrame({ + frames, + targetOffsetMs: exactTime, }); - expect(results?.id).toEqual(20); + expect(results).toEqual(frames[1]); }); it('should return the crumb if timestamps exactly match and allowMatch is enabled', () => { - const crumbs = createCrumbs(); - const exactCrumbTime = 8135; - const results = getNextReplayEvent({ - items: crumbs, - targetTimestampMs: START_TIMESTAMP_SEC * 1000 + exactCrumbTime, + const exactTime = 8135; + const results = getNextReplayFrame({ + frames, + targetOffsetMs: 
exactTime, }); - expect(results?.id).toEqual(20); + expect(results).toEqual(frames[1]); }); }); diff --git a/static/app/utils/replays/getReplayEvent.tsx b/static/app/utils/replays/getReplayEvent.tsx index fccd58692b89fa..4b103babbee6e7 100644 --- a/static/app/utils/replays/getReplayEvent.tsx +++ b/static/app/utils/replays/getReplayEvent.tsx @@ -1,6 +1,7 @@ import sortedIndexBy from 'lodash/sortedIndexBy'; import type {Crumb} from 'sentry/types/breadcrumbs'; +import type {ReplayFrame} from 'sentry/utils/replays/types'; import type {ReplaySpan} from 'sentry/views/replays/types'; export function getPrevReplayEvent({ @@ -27,25 +28,23 @@ export function getPrevReplayEvent({ return undefined; } -export function getNextReplayEvent({ - items, - targetTimestampMs, +export function getNextReplayFrame({ + frames, + targetOffsetMs, allowExact = false, }: { - items: T[]; - targetTimestampMs: number; + frames: ReplayFrame[]; + targetOffsetMs: number; allowExact?: boolean; }) { - return items.reduce((found, item) => { - const itemTimestampMS = +new Date(item.timestamp || ''); - + return frames.reduce((found, item) => { if ( - itemTimestampMS < targetTimestampMs || - (!allowExact && itemTimestampMS === targetTimestampMs) + item.offsetMs < targetOffsetMs || + (!allowExact && item.offsetMs === targetOffsetMs) ) { return found; } - if (!found || itemTimestampMS < +new Date(found.timestamp || '')) { + if (!found || item.timestampMs < found.timestampMs) { return item; } return found; diff --git a/static/app/utils/replays/replayReader.tsx b/static/app/utils/replays/replayReader.tsx index 21979d68771162..7c4803a9d2267e 100644 --- a/static/app/utils/replays/replayReader.tsx +++ b/static/app/utils/replays/replayReader.tsx @@ -33,7 +33,6 @@ import type { SpanFrame, } from 'sentry/utils/replays/types'; import type { - MemorySpan, NetworkSpan, RecordingEvent, ReplayCrumb, @@ -306,31 +305,10 @@ export default class ReplayReader { /*********************/ /** OLD STUFF BELOW **/ 
/*********************/ - getCrumbsWithRRWebNodes = memoize(() => - this.breadcrumbs.filter( - crumb => crumb.data && typeof crumb.data === 'object' && 'nodeId' in crumb.data - ) - ); - - getUserActionCrumbs = memoize(() => { - const USER_ACTIONS = [ - BreadcrumbType.ERROR, - BreadcrumbType.INIT, - BreadcrumbType.NAVIGATION, - BreadcrumbType.UI, - BreadcrumbType.USER, - ]; - return this.breadcrumbs.filter(crumb => USER_ACTIONS.includes(crumb.type)); - }); - getConsoleCrumbs = memoize(() => this.breadcrumbs.filter(crumb => crumb.category === 'console') ); - getIssueCrumbs = memoize(() => - this.breadcrumbs.filter(crumb => crumb.category === 'issue') - ); - getNonConsoleCrumbs = memoize(() => this.breadcrumbs.filter(crumb => crumb.category !== 'console') ); @@ -342,14 +320,8 @@ export default class ReplayReader { ); getNetworkSpans = memoize(() => this.sortedSpans.filter(isNetworkSpan)); - - getMemorySpans = memoize(() => this.sortedSpans.filter(isMemorySpan)); } -const isMemorySpan = (span: ReplaySpan): span is MemorySpan => { - return span.op === 'memory'; -}; - const isNetworkSpan = (span: ReplaySpan): span is NetworkSpan => { return span.op?.startsWith('navigation.') || span.op?.startsWith('resource.'); }; From a257237eb1d784a9e929913d583afce4c831d1ae Mon Sep 17 00:00:00 2001 From: William Mak Date: Mon, 17 Jul 2023 15:14:26 -0400 Subject: [PATCH 37/67] feat(starfish): Add unit and types for rates (#52991) - This adds a `rate` type for epm and eps - This adds a `1/minute` and `1/second` unit for epm and eps respectively --- src/sentry/api/bases/organization_events.py | 7 +++++++ src/sentry/search/events/constants.py | 11 ++++++++++- src/sentry/search/events/datasets/discover.py | 4 ++-- src/sentry/search/events/datasets/metrics.py | 6 +++--- src/sentry/search/events/datasets/metrics_layer.py | 4 ++-- src/sentry/search/events/datasets/spans_indexed.py | 4 ++-- src/sentry/search/events/datasets/spans_metrics.py | 4 ++-- 
tests/snuba/api/endpoints/test_organization_events.py | 3 +++ .../api/endpoints/test_organization_events_mep.py | 9 ++++++--- .../test_organization_events_span_metrics.py | 8 ++++++++ 10 files changed, 45 insertions(+), 15 deletions(-) diff --git a/src/sentry/api/bases/organization_events.py b/src/sentry/api/bases/organization_events.py index ee2e7166e93631..431860ae4ff533 100644 --- a/src/sentry/api/bases/organization_events.py +++ b/src/sentry/api/bases/organization_events.py @@ -293,6 +293,13 @@ def handle_unit_meta( elif value in DURATION_UNITS: units[key] = value meta[key] = "duration" + elif value == "rate": + if key in ["eps()", "sps()", "tps()"]: + units[key] = "1/second" + elif key in ["epm()", "spm()", "tpm()"]: + units[key] = "1/minute" + else: + units[key] = None elif value == "duration": units[key] = "millisecond" else: diff --git a/src/sentry/search/events/constants.py b/src/sentry/search/events/constants.py index 9cdd0d036bc37d..e85c2db60a1244 100644 --- a/src/sentry/search/events/constants.py +++ b/src/sentry/search/events/constants.py @@ -95,7 +95,16 @@ class ThresholdDict(TypedDict): DURATION_PATTERN = re.compile(r"(\d+\.?\d?)(\D{1,3})") -RESULT_TYPES = {"duration", "string", "number", "integer", "percentage", "percent_change", "date"} +RESULT_TYPES = { + "duration", + "string", + "number", + "integer", + "percentage", + "percent_change", + "date", + "rate", +} # event_search normalizes to bytes # based on https://getsentry.github.io/relay/relay_metrics/enum.InformationUnit.html SIZE_UNITS = { diff --git a/src/sentry/search/events/datasets/discover.py b/src/sentry/search/events/datasets/discover.py index 88ce88c6d280f5..77d1fe8b07f5d9 100644 --- a/src/sentry/search/events/datasets/discover.py +++ b/src/sentry/search/events/datasets/discover.py @@ -623,7 +623,7 @@ def function_converter(self) -> Mapping[str, SnQLFunction]: "divide", [Function("count", []), args["interval"]], alias ), optional_args=[IntervalDefault("interval", 1, None)], - 
default_result_type="number", + default_result_type="rate", ), SnQLFunction( "epm", @@ -633,7 +633,7 @@ def function_converter(self) -> Mapping[str, SnQLFunction]: alias, ), optional_args=[IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), SnQLFunction( "compare_numeric_aggregate", diff --git a/src/sentry/search/events/datasets/metrics.py b/src/sentry/search/events/datasets/metrics.py index 96313735f6563e..092f72bcd0dc05 100644 --- a/src/sentry/search/events/datasets/metrics.py +++ b/src/sentry/search/events/datasets/metrics.py @@ -530,7 +530,7 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]: "epm", snql_distribution=self._resolve_epm, optional_args=[fields.IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), fields.MetricsFunction( "floored_epm", @@ -573,13 +573,13 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]: alias, ), optional_args=[fields.IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), fields.MetricsFunction( "eps", snql_distribution=self._resolve_eps, optional_args=[fields.IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), fields.MetricsFunction( "failure_count", diff --git a/src/sentry/search/events/datasets/metrics_layer.py b/src/sentry/search/events/datasets/metrics_layer.py index 63dcb23f3489b8..deb8cf976fe48b 100644 --- a/src/sentry/search/events/datasets/metrics_layer.py +++ b/src/sentry/search/events/datasets/metrics_layer.py @@ -366,7 +366,7 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]: alias, ), optional_args=[fields.IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), fields.MetricsFunction( "eps", @@ -380,7 +380,7 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]: alias, ), 
optional_args=[fields.IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), fields.MetricsFunction( "failure_count", diff --git a/src/sentry/search/events/datasets/spans_indexed.py b/src/sentry/search/events/datasets/spans_indexed.py index cc08b46433d0fa..c04ddec8fa3462 100644 --- a/src/sentry/search/events/datasets/spans_indexed.py +++ b/src/sentry/search/events/datasets/spans_indexed.py @@ -155,7 +155,7 @@ def function_converter(self) -> Mapping[str, SnQLFunction]: "divide", [Function("count", []), args["interval"]], alias ), optional_args=[IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), SnQLFunction( "epm", @@ -165,7 +165,7 @@ def function_converter(self) -> Mapping[str, SnQLFunction]: alias, ), optional_args=[IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), ] } diff --git a/src/sentry/search/events/datasets/spans_metrics.py b/src/sentry/search/events/datasets/spans_metrics.py index f4615cea916255..e374467fca4df6 100644 --- a/src/sentry/search/events/datasets/spans_metrics.py +++ b/src/sentry/search/events/datasets/spans_metrics.py @@ -84,13 +84,13 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]: "epm", snql_distribution=self._resolve_epm, optional_args=[fields.IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), fields.MetricsFunction( "eps", snql_distribution=self._resolve_eps, optional_args=[fields.IntervalDefault("interval", 1, None)], - default_result_type="number", + default_result_type="rate", ), fields.MetricsFunction( "count", diff --git a/tests/snuba/api/endpoints/test_organization_events.py b/tests/snuba/api/endpoints/test_organization_events.py index 87101fae6dd464..3b62a45f30d31c 100644 --- a/tests/snuba/api/endpoints/test_organization_events.py +++ b/tests/snuba/api/endpoints/test_organization_events.py @@ -2522,6 
+2522,9 @@ def test_epm_function(self): assert data[0]["epm()"] == 0.5 assert data[1]["transaction"] == event2.transaction assert data[1]["epm()"] == 0.5 + meta = response.data["meta"] + assert meta["fields"]["epm()"] == "rate" + assert meta["units"]["epm()"] == "1/minute" def test_nonexistent_fields(self): self.store_event( diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py index b21ebec27b33d9..7eeb90c5bbfdb4 100644 --- a/tests/snuba/api/endpoints/test_organization_events_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_mep.py @@ -161,7 +161,7 @@ def test_project_name(self): assert meta["isMetricsData"] assert field_meta["project.name"] == "string" assert field_meta["environment"] == "string" - assert field_meta["epm()"] == "number" + assert field_meta["epm()"] == "rate" def test_project_id(self): self.store_transaction_metric( @@ -190,7 +190,7 @@ def test_project_id(self): assert meta["isMetricsData"] assert field_meta["project_id"] == "integer" assert field_meta["environment"] == "string" - assert field_meta["epm()"] == "number" + assert field_meta["epm()"] == "rate" def test_project_dot_id(self): self.store_transaction_metric( @@ -219,7 +219,7 @@ def test_project_dot_id(self): assert meta["isMetricsData"] assert field_meta["project.id"] == "integer" assert field_meta["environment"] == "string" - assert field_meta["epm()"] == "number" + assert field_meta["epm()"] == "rate" def test_title_alias(self): """title is an alias to transaction name""" @@ -500,6 +500,9 @@ def test_performance_homepage_query(self): assert field_meta["user_misery()"] == "number" assert field_meta["failure_rate()"] == "percentage" assert field_meta["failure_count()"] == "integer" + assert field_meta["tpm()"] == "rate" + + assert meta["units"]["tpm()"] == "1/minute" def test_user_misery_and_team_key_sort(self): self.store_transaction_metric( diff --git 
a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py index 2d91bb90702aa7..04aeff97de7f87 100644 --- a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py +++ b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py @@ -194,6 +194,10 @@ def test_eps(self): assert len(data) == 1 assert data[0]["eps()"] == 0.01 assert data[0]["sps()"] == 0.01 + assert meta["fields"]["eps()"] == "rate" + assert meta["fields"]["sps()"] == "rate" + assert meta["units"]["eps()"] == "1/second" + assert meta["units"]["sps()"] == "1/second" assert meta["dataset"] == "spansMetrics" def test_epm(self): @@ -218,6 +222,10 @@ def test_epm(self): assert len(data) == 1 assert data[0]["epm()"] == 0.6 assert data[0]["spm()"] == 0.6 + assert meta["fields"]["epm()"] == "rate" + assert meta["fields"]["spm()"] == "rate" + assert meta["units"]["epm()"] == "1/minute" + assert meta["units"]["spm()"] == "1/minute" assert meta["dataset"] == "spansMetrics" def test_time_spent_percentage(self): From b3f633423d6c753c99bbf0d8e476c85e138b071e Mon Sep 17 00:00:00 2001 From: Alberto Leal Date: Mon, 17 Jul 2023 15:15:22 -0400 Subject: [PATCH 38/67] chore(hybrid-cloud): Add tags to debug Identity service (#52997) --- src/sentry/integrations/base.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/sentry/integrations/base.py b/src/sentry/integrations/base.py index 9fd32c0235a981..c611b49c40a6c9 100644 --- a/src/sentry/integrations/base.py +++ b/src/sentry/integrations/base.py @@ -41,6 +41,7 @@ UnsupportedResponseType, ) from sentry.utils.audit import create_audit_entry +from sentry.utils.sdk import configure_scope if TYPE_CHECKING: from sentry.services.hybrid_cloud.integration import RpcOrganizationIntegration @@ -364,6 +365,10 @@ def get_default_identity(self) -> RpcIdentity: filter={"id": self.org_integration.default_auth_id} ) if identity is None: + with configure_scope() as scope: + 
scope.set_tag("integration_provider", self.model.get_provider().name) + scope.set_tag("org_integration_id", self.org_integration.id) + scope.set_tag("default_auth_id", self.org_integration.default_auth_id) raise Identity.DoesNotExist return identity From 08c834287b766f11fc0468c6c93886b6f76d4ebf Mon Sep 17 00:00:00 2001 From: Ian Woodard <17186604+IanWoodard@users.noreply.github.com> Date: Mon, 17 Jul 2023 12:26:21 -0700 Subject: [PATCH 39/67] ref(tsc): usageChart utils to tsx (#52935) Convert utils.spec.jsx to tsx --- .../usageChart/{utils.spec.jsx => utils.spec.tsx} | 2 +- static/app/views/organizationStats/usageChart/utils.tsx | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) rename static/app/views/organizationStats/usageChart/{utils.spec.jsx => utils.spec.tsx} (98%) diff --git a/static/app/views/organizationStats/usageChart/utils.spec.jsx b/static/app/views/organizationStats/usageChart/utils.spec.tsx similarity index 98% rename from static/app/views/organizationStats/usageChart/utils.spec.jsx rename to static/app/views/organizationStats/usageChart/utils.spec.tsx index 040cf1c4b0ea1a..c86c502334f0c5 100644 --- a/static/app/views/organizationStats/usageChart/utils.spec.jsx +++ b/static/app/views/organizationStats/usageChart/utils.spec.tsx @@ -13,7 +13,7 @@ describe('getDateFromMoment', () => { // Ensure date remains in UTC it('shows the date if interval is >= 24h', () => { expect(getDateFromMoment(start)).toBe('Jul 9'); - expect(getDateFromMoment(start, '7d')).toBe('Jul 9'); + expect(getDateFromMoment(start, '2d')).toBe('Jul 9'); expect(getDateFromMoment(moment('2021-10-31'))).toBe('Oct 31'); }); diff --git a/static/app/views/organizationStats/usageChart/utils.tsx b/static/app/views/organizationStats/usageChart/utils.tsx index 7d0454707ddee5..3ade8d0eafe1b7 100644 --- a/static/app/views/organizationStats/usageChart/utils.tsx +++ b/static/app/views/organizationStats/usageChart/utils.tsx @@ -49,8 +49,8 @@ export function getDateFromUnixTimestamp(timestamp: 
number) { } export function getXAxisDates( - dateStart: string, - dateEnd: string, + dateStart: moment.MomentInput, + dateEnd: moment.MomentInput, dateUtc: boolean = false, interval: IntervalPeriod = '1d' ): string[] { From b479b079337c00cf557de3152dc90678189a8a37 Mon Sep 17 00:00:00 2001 From: William Mak Date: Mon, 17 Jul 2023 15:47:09 -0400 Subject: [PATCH 40/67] chore(starfish): Add starfish referrers to the backend (#52754) - This adds all the starfish referrers to the backend --- .../api/endpoints/organization_events.py | 13 ++++++++ .../endpoints/organization_events_stats.py | 6 ++++ src/sentry/snuba/referrer.py | 31 +++++++++++++++++++ 3 files changed, 50 insertions(+) diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 0edeef1488e0e9..59e996c746ade3 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -57,6 +57,19 @@ Referrer.API_TRACE_VIEW_ERRORS_VIEW.value, Referrer.API_TRACE_VIEW_HOVER_CARD.value, Referrer.API_ISSUES_ISSUE_EVENTS.value, + Referrer.API_STARFISH_ENDPOINT_LIST.value, + Referrer.API_STARFISH_GET_SPAN_ACTIONS.value, + Referrer.API_STARFISH_GET_SPAN_DOMAINS.value, + Referrer.API_STARFISH_GET_SPAN_OPERATIONS.value, + Referrer.API_STARFISH_SIDEBAR_SPAN_METRICS.value, + Referrer.API_STARFISH_SPAN_CATEGORY_BREAKDOWN.value, + Referrer.API_STARFISH_SPAN_LIST.value, + Referrer.API_STARFISH_SPAN_SUMMARY_P95.value, + Referrer.API_STARFISH_SPAN_SUMMARY_PAGE.value, + Referrer.API_STARFISH_SPAN_SUMMARY_PANEL.value, + Referrer.API_STARFISH_SPAN_SUMMARY_TRANSACTIONS.value, + Referrer.API_STARFISH_SPAN_TRANSACTION_METRICS.value, + Referrer.API_STARFISH_TOTAL_TIME.value, } ALLOWED_EVENTS_GEO_REFERRERS = { diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index c5eea994816c24..5dfd7968f8637e 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py 
+++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -54,6 +54,12 @@ Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_FROZEN_FRAMES_AREA.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_SLOW_FRAMES.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_FROZEN_FRAMES.value, + Referrer.API_STARFISH_SPAN_CATEGORY_BREAKDOWN_CHART.value, + Referrer.API_STARFISH_ENDPOINT_OVERVIEW.value, + Referrer.API_STARFISH_HTTP_ERROR_COUNT.value, + Referrer.API_STARFISH_SPAN_SUMMARY_PAGE_CHART.value, + Referrer.API_STARFISH_SIDEBAR_SPAN_METRICS_CHART.value, + Referrer.API_STARFISH_SPAN_TIME_CHARTS.value, } diff --git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py index 48236b78186048..8b924169421871 100644 --- a/src/sentry/snuba/referrer.py +++ b/src/sentry/snuba/referrer.py @@ -50,6 +50,9 @@ class Referrer(Enum): API_DISCOVER_TOTAL_SUM_TRANSACTION_DURATION_FIELD = ( "api.discover.total-sum-transaction-duration-field" ) + API_DISCOVER_TOTAL_SUM_TRANSACTION_DURATION_FIELD_PRIMARY = ( + "api.discover.total-sum-transaction-duration-field.primary" + ) API_DISCOVER_DAILY_CHART = "api.discover.daily-chart" API_DISCOVER_DAILYTOP5_CHART_FIND_TOPN = "api.discover.dailytop5-chart.find-topn" API_DISCOVER_DAILYTOP5_CHART = "api.discover.dailytop5-chart" @@ -339,6 +342,34 @@ class Referrer(Enum): API_PROJECT_EVENTS = "api.project-events" API_RELEASES_RELEASE_DETAILS_CHART = "api.releases.release-details-chart" API_REPLAY_DETAILS_PAGE = "api.replay.details-page" + + API_STARFISH_ENDPOINT_LIST = "api.starfish.endpoint-list" + API_STARFISH_GET_SPAN_ACTIONS = "api.starfish.get-span-actions" + API_STARFISH_GET_SPAN_DOMAINS = "api.starfish.get-span-domains" + API_STARFISH_GET_SPAN_OPERATIONS = "api.starfish.get-span-operations" + API_STARFISH_SIDEBAR_SPAN_METRICS = "api.starfish.sidebar-span-metrics" + API_STARFISH_SPAN_CATEGORY_BREAKDOWN = "api.starfish-web-service.span-category-breakdown" + API_STARFISH_SPAN_LIST = "api.starfish.use-span-list" + 
API_STARFISH_SPAN_LIST_PRIMARY = "api.starfish.use-span-list.primary" + API_STARFISH_SPAN_SUMMARY_P95 = "api.starfish.span-summary-panel-samples-table-p95" + API_STARFISH_SPAN_SUMMARY_PAGE = "api.starfish.span-summary-page-metrics" + API_STARFISH_SPAN_SUMMARY_PANEL = "api.starfish.span-summary-panel-metrics" + API_STARFISH_SPAN_SUMMARY_TRANSACTIONS = ( + "api.starfish.span-summary-panel-samples-table-transactions" + ) + API_STARFISH_SPAN_TRANSACTION_METRICS = "api.starfish.span-transaction-metrics" + API_STARFISH_TOTAL_TIME = "api.starfish-web-service.total-time" + API_STARFISH_HOMEPAGE_CHART = "api.starfish-web-service.homepage-chart" + + API_STARFISH_SPAN_CATEGORY_BREAKDOWN_CHART = ( + "api.starfish-web-service.span-category-breakdown-timeseries" + ) + API_STARFISH_ENDPOINT_OVERVIEW = "api.starfish-web-service.starfish-endpoint-overview" + API_STARFISH_HTTP_ERROR_COUNT = "api.starfish.get-http-error-count" + API_STARFISH_SPAN_SUMMARY_PAGE_CHART = "api.starfish.span-summary-page-metrics-chart" + API_STARFISH_SIDEBAR_SPAN_METRICS_CHART = "api.starfish.sidebar-span-metrics-chart" + API_STARFISH_SPAN_TIME_CHARTS = "api.starfish.span-time-charts" + API_SPAN_SAMPLE_GET_BOUNDS = "api.spans.sample-get-bounds" API_SPAN_SAMPLE_GET_SPAN_IDS = "api.spans.sample-get-span-ids" API_SPAN_SAMPLE_GET_SPAN_DATA = "api.spans.sample-get-span-data" From 7e137727c954f7f7e2097124649cc5ad605a9772 Mon Sep 17 00:00:00 2001 From: William Mak Date: Mon, 17 Jul 2023 15:47:22 -0400 Subject: [PATCH 41/67] chore(starfish): Adding referrers to starfish (#52749) - Making all the starfish referrers consistent, either with `starfish` prefix or `starfish-web-service` - Adding a referrer to all the requests, will need to follow up and all these referrers to the backend allowlist --- static/app/views/starfish/queries/useSpanList.tsx | 2 +- static/app/views/starfish/queries/useSpanSamples.tsx | 2 +- .../views/starfish/queries/useSpanTransactionMetrics.tsx | 2 +- 
static/app/views/starfish/views/spanSummaryPage/index.tsx | 4 ++-- .../spanSummaryPage/sampleList/durationChart/index.tsx | 4 ++-- .../views/spanSummaryPage/sampleList/sampleInfo/index.tsx | 2 +- .../spanSummaryPage/sampleList/sampleTable/sampleTable.tsx | 4 ++-- static/app/views/starfish/views/spans/queries.tsx | 1 + .../views/starfish/views/spans/selectors/actionSelector.tsx | 1 + .../views/starfish/views/spans/selectors/domainSelector.tsx | 1 + .../views/spans/selectors/spanOperationSelector.tsx | 1 + static/app/views/starfish/views/spans/spanTimeCharts.tsx | 3 +++ static/app/views/starfish/views/spans/spansTable.tsx | 2 +- .../views/webServiceView/endpointOverview/index.tsx | 2 +- .../views/webServiceView/spanGroupBreakdownContainer.tsx | 6 +++--- .../views/starfish/views/webServiceView/starfishView.tsx | 2 +- 16 files changed, 23 insertions(+), 16 deletions(-) diff --git a/static/app/views/starfish/queries/useSpanList.tsx b/static/app/views/starfish/queries/useSpanList.tsx index 3387dcac13e30f..136e6b6b143184 100644 --- a/static/app/views/starfish/queries/useSpanList.tsx +++ b/static/app/views/starfish/queries/useSpanList.tsx @@ -36,7 +36,7 @@ export const useSpanList = ( spanCategory?: string, sorts?: Sort[], limit?: number, - referrer = 'use-span-list', + referrer = 'api.starfish.use-span-list', cursor?: string ) => { const location = useLocation(); diff --git a/static/app/views/starfish/queries/useSpanSamples.tsx b/static/app/views/starfish/queries/useSpanSamples.tsx index 2587f7321e4b25..4c7693f0c05a50 100644 --- a/static/app/views/starfish/queries/useSpanSamples.tsx +++ b/static/app/views/starfish/queries/useSpanSamples.tsx @@ -48,7 +48,7 @@ export const useSpanSamples = (options: Options) => { {group: groupId}, {transactionName, 'transaction.method': transactionMethod}, [`p95(${SPAN_SELF_TIME})`], - 'sidebar-span-metrics' + 'api.starfish.sidebar-span-metrics' ); const maxYValue = computeAxisMax([spanMetricsSeriesData?.[`p95(${SPAN_SELF_TIME})`]]); diff 
--git a/static/app/views/starfish/queries/useSpanTransactionMetrics.tsx b/static/app/views/starfish/queries/useSpanTransactionMetrics.tsx index 97ff3d96c69706..9dc1f972455053 100644 --- a/static/app/views/starfish/queries/useSpanTransactionMetrics.tsx +++ b/static/app/views/starfish/queries/useSpanTransactionMetrics.tsx @@ -28,7 +28,7 @@ export type SpanTransactionMetrics = { export const useSpanTransactionMetrics = ( span: Pick, options: {sorts?: Sort[]; transactions?: string[]}, - _referrer = 'span-transaction-metrics' + _referrer = 'api.starfish.span-transaction-metrics' ) => { const location = useLocation(); diff --git a/static/app/views/starfish/views/spanSummaryPage/index.tsx b/static/app/views/starfish/views/spanSummaryPage/index.tsx index 4f24403848127d..0bccd49f56914b 100644 --- a/static/app/views/starfish/views/spanSummaryPage/index.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/index.tsx @@ -90,7 +90,7 @@ function SpanSummaryPage({params, location}: Props) { 'time_spent_percentage()', 'http_error_count()', ], - 'span-summary-page-metrics' + 'api.starfish.span-summary-page-metrics' ); const span = Object.assign({group: groupId}, spanMetrics as SpanMetrics & SpanMeta); @@ -100,7 +100,7 @@ function SpanSummaryPage({params, location}: Props) { {group: groupId}, queryFilter, [`p95(${SPAN_SELF_TIME})`, 'sps()', 'http_error_count()'], - 'span-summary-page-metrics' + 'api.starfish.span-summary-page-metrics-chart' ); useSynchronizeCharts([!areSpanMetricsSeriesLoading]); diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx index 2e4845c297e88f..a687da0dad5b63 100644 --- a/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx @@ -67,14 +67,14 @@ function DurationChart({ {group: groupId}, {transactionName, 
'transaction.method': transactionMethod}, [`p95(${SPAN_SELF_TIME})`], - 'sidebar-span-metrics' + 'api.starfish.sidebar-span-metrics-chart' ); const {data: spanMetrics, error: spanMetricsError} = useSpanMetrics( {group: groupId}, {transactionName, 'transaction.method': transactionMethod}, [`p95(${SPAN_SELF_TIME})`, SPAN_OP], - 'span-summary-panel-samples-table-p95' + 'api.starfish.span-summary-panel-samples-table-p95' ); const p95 = spanMetrics?.[`p95(${SPAN_SELF_TIME})`] || 0; diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx index bb21e5cac53848..4420a0ce3caefd 100644 --- a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx @@ -29,7 +29,7 @@ function SampleInfo(props: Props) { `p95(${SPAN_SELF_TIME})`, 'time_spent_percentage(local)', ], - 'span-summary-panel-metrics' + 'api.starfish.span-summary-panel-metrics' ); const style: CSSProperties = { diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx index f01473cad4a8b0..ff9f187b105daa 100644 --- a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx @@ -36,7 +36,7 @@ function SampleTable({ {group: groupId}, {transactionName, 'transaction.method': transactionMethod}, [`p95(${SPAN_SELF_TIME})`, SPAN_OP], - 'span-summary-panel-samples-table-p95' + 'api.starfish.span-summary-panel-samples-table-p95' ); const organization = useOrganization(); @@ -59,7 +59,7 @@ function SampleTable({ error: transactionError, } = useTransactions( spans.map(span => span['transaction.id']), - 'span-summary-panel-samples-table-transactions' + 
'api.starfish.span-summary-panel-samples-table-transactions' ); const [loadedSpans, setLoadedSpans] = useState(false); diff --git a/static/app/views/starfish/views/spans/queries.tsx b/static/app/views/starfish/views/spans/queries.tsx index d024d98ec2319b..3a9836841ce554 100644 --- a/static/app/views/starfish/views/spans/queries.tsx +++ b/static/app/views/starfish/views/spans/queries.tsx @@ -34,6 +34,7 @@ export const useErrorRateQuery = (queryString: string) => { const result = useSpansQuery<{'http_error_count()': number; interval: number}[]>({ eventView, initialData: [], + referrer: 'api.starfish.get-http-error-count', }); const formattedData = result?.data?.map(entry => { diff --git a/static/app/views/starfish/views/spans/selectors/actionSelector.tsx b/static/app/views/starfish/views/spans/selectors/actionSelector.tsx index 5ef707f959ce3c..e614010458b2ec 100644 --- a/static/app/views/starfish/views/spans/selectors/actionSelector.tsx +++ b/static/app/views/starfish/views/spans/selectors/actionSelector.tsx @@ -36,6 +36,7 @@ export function ActionSelector({ eventView, initialData: [], enabled: !useHTTPActions, + referrer: 'api.starfish.get-span-actions', }); const options = useHTTPActions diff --git a/static/app/views/starfish/views/spans/selectors/domainSelector.tsx b/static/app/views/starfish/views/spans/selectors/domainSelector.tsx index 439db1fbfd8fa4..616cbf03653750 100644 --- a/static/app/views/starfish/views/spans/selectors/domainSelector.tsx +++ b/static/app/views/starfish/views/spans/selectors/domainSelector.tsx @@ -33,6 +33,7 @@ export function DomainSelector({ const {data: domains} = useSpansQuery<{'span.domain': string}[]>({ eventView, initialData: [], + referrer: 'api.starfish.get-span-domains', }); const options = [ diff --git a/static/app/views/starfish/views/spans/selectors/spanOperationSelector.tsx b/static/app/views/starfish/views/spans/selectors/spanOperationSelector.tsx index 4e93e3443e538b..77ae24bfe622cf 100644 --- 
a/static/app/views/starfish/views/spans/selectors/spanOperationSelector.tsx +++ b/static/app/views/starfish/views/spans/selectors/spanOperationSelector.tsx @@ -32,6 +32,7 @@ export function SpanOperationSelector({ const {data: operations} = useSpansQuery<{'span.op': string}[]>({ eventView, initialData: [], + referrer: 'api.starfish.get-span-operations', }); const options = [ diff --git a/static/app/views/starfish/views/spans/spanTimeCharts.tsx b/static/app/views/starfish/views/spans/spanTimeCharts.tsx index b6fbca5a39eb7d..079d935f9e74a9 100644 --- a/static/app/views/starfish/views/spans/spanTimeCharts.tsx +++ b/static/app/views/starfish/views/spans/spanTimeCharts.tsx @@ -50,6 +50,7 @@ export function SpanTimeCharts({moduleName, appliedFilters, spanCategory}: Props const {isLoading} = useSpansQuery({ eventView, initialData: [], + referrer: 'api.starfish.span-time-charts', }); useSynchronizeCharts([!isLoading]); @@ -99,6 +100,7 @@ function ThroughputChart({moduleName, filters}: ChartProps): JSX.Element { >({ eventView, initialData: [], + referrer: 'api.starfish.span-time-charts', }); const dataByGroup = {[label]: data}; @@ -156,6 +158,7 @@ function DurationChart({moduleName, filters}: ChartProps): JSX.Element { >({ eventView, initialData: [], + referrer: 'api.starfish.span-time-charts', }); const dataByGroup = {[label]: data}; diff --git a/static/app/views/starfish/views/spans/spansTable.tsx b/static/app/views/starfish/views/spans/spansTable.tsx index 91d23a280de6c6..2ec75940782a29 100644 --- a/static/app/views/starfish/views/spans/spansTable.tsx +++ b/static/app/views/starfish/views/spans/spansTable.tsx @@ -92,7 +92,7 @@ export default function SpansTable({ spanCategory, [sort], limit, - 'use-span-list', + 'api.starfish.use-span-list', spansCursor ); diff --git a/static/app/views/starfish/views/webServiceView/endpointOverview/index.tsx b/static/app/views/starfish/views/webServiceView/endpointOverview/index.tsx index 44bb6cb2f81f38..618f179ef295b6 100644 --- 
a/static/app/views/starfish/views/webServiceView/endpointOverview/index.tsx +++ b/static/app/views/starfish/views/webServiceView/endpointOverview/index.tsx @@ -120,7 +120,7 @@ export default function EndpointOverview() { environment={eventView.environment} project={eventView.project} period={eventView.statsPeriod} - referrer="starfish-endpoint-overview" + referrer="api.starfish-web-service.starfish-endpoint-overview" start={eventView.start} end={eventView.end} organization={organization} diff --git a/static/app/views/starfish/views/webServiceView/spanGroupBreakdownContainer.tsx b/static/app/views/starfish/views/webServiceView/spanGroupBreakdownContainer.tsx index 764d753fb7ed45..ae96f177c50828 100644 --- a/static/app/views/starfish/views/webServiceView/spanGroupBreakdownContainer.tsx +++ b/static/app/views/starfish/views/webServiceView/spanGroupBreakdownContainer.tsx @@ -72,7 +72,7 @@ export function SpanGroupBreakdownContainer({transaction, transactionMethod}: Pr ['span.category'] ), orgSlug: organization.slug, - referrer: 'starfish-web-service.span-category-breakdown', + referrer: 'api.starfish-web-service.span-category-breakdown', location, limit: 4, }); @@ -86,7 +86,7 @@ export function SpanGroupBreakdownContainer({transaction, transactionMethod}: Pr [] ), orgSlug: organization.slug, - referrer: 'starfish-web-service.total-time', + referrer: 'api.starfish-web-service.total-time', location, }); @@ -105,7 +105,7 @@ export function SpanGroupBreakdownContainer({transaction, transactionMethod}: Pr true ), enabled: true, - referrer: 'starfish-web-service.span-category-breakdown-timeseries', + referrer: 'api.starfish-web-service.span-category-breakdown-timeseries', initialData: [], }); diff --git a/static/app/views/starfish/views/webServiceView/starfishView.tsx b/static/app/views/starfish/views/webServiceView/starfishView.tsx index b8fa2b63465872..fa9bcbaba62f1e 100644 --- a/static/app/views/starfish/views/webServiceView/starfishView.tsx +++ 
b/static/app/views/starfish/views/webServiceView/starfishView.tsx @@ -64,7 +64,7 @@ export function StarfishView(props: BasePerformanceViewProps) { environment={eventView.environment} project={eventView.project} period={eventView.statsPeriod} - referrer="starfish-homepage-charts" + referrer="api.starfish-web-service.homepage-charts" start={eventView.start} end={eventView.end} organization={organization} From cafa5df2674e43a68f793fbdb94011162f0c0eda Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 17 Jul 2023 12:51:15 -0700 Subject: [PATCH 42/67] feat(grouping): Add metrics for issue merging and unmerging (#52919) This adds DataDog metrics for instances of issues being merged and unmerged. Both metrics include the issues' platform, and the merge metric additionally includes referer (since you can merge from either the issue stream or the Similar Issues tab). A few missing merge-related tests were also added. --- src/sentry/api/endpoints/group_hashes.py | 8 ++ src/sentry/api/helpers/group_index/update.py | 23 +++++ .../sentry/api/endpoints/test_group_hashes.py | 14 ++- tests/sentry/api/helpers/test_group_index.py | 98 ++++++++++++++++++- 4 files changed, 139 insertions(+), 4 deletions(-) diff --git a/src/sentry/api/endpoints/group_hashes.py b/src/sentry/api/endpoints/group_hashes.py index 85c2844cb914ed..79a37f0d1ec45b 100644 --- a/src/sentry/api/endpoints/group_hashes.py +++ b/src/sentry/api/endpoints/group_hashes.py @@ -10,6 +10,7 @@ from sentry.api.serializers import EventSerializer, serialize from sentry.models import GroupHash from sentry.tasks.unmerge import unmerge +from sentry.utils import metrics from sentry.utils.snuba import raw_query @@ -61,6 +62,13 @@ def delete(self, request: Request, group) -> Response: if not hash_list: return Response() + metrics.incr( + "grouping.unmerge_issues", + sample_rate=1.0, + # We assume that if someone's merged groups, they were all from the same platform + tags={"platform": group.platform or "unknown"}, + ) + 
unmerge.delay( group.project_id, group.id, None, hash_list, request.user.id if request.user else None ) diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py index 068c6c9adb7b55..6e613110d01e63 100644 --- a/src/sentry/api/helpers/group_index/update.py +++ b/src/sentry/api/helpers/group_index/update.py @@ -1,8 +1,10 @@ from __future__ import annotations +import re from collections import defaultdict from datetime import datetime, timedelta from typing import Any, Dict, Mapping, MutableMapping, Sequence +from urllib.parse import urlparse import rest_framework from django.db import IntegrityError, transaction @@ -650,6 +652,27 @@ def update_groups( if len(projects) > 1: return Response({"detail": "Merging across multiple projects is not supported"}) + referer = urlparse(request.META.get("HTTP_REFERER", "")).path + issue_stream_regex = r"^(\/organizations\/[^\/]+)?\/issues\/$" + similar_issues_tab_regex = r"^(\/organizations\/[^\/]+)?\/issues\/\d+\/similar\/$" + + metrics.incr( + "grouping.merge_issues", + sample_rate=1.0, + tags={ + # We assume that if someone's merging groups, they're from the same platform + "platform": group_list[0].platform or "unknown", + # TODO: It's probably cleaner to just send this value from the front end + "referer": ( + "issue stream" + if re.search(issue_stream_regex, referer) + else "similar issues tab" + if re.search(similar_issues_tab_regex, referer) + else "unknown" + ), + }, + ) + result["merge"] = handle_merge(group_list, project_lookup, acting_user) inbox = result.get("inbox", None) diff --git a/tests/sentry/api/endpoints/test_group_hashes.py b/tests/sentry/api/endpoints/test_group_hashes.py index 9d263f8aa68935..08198364b011ed 100644 --- a/tests/sentry/api/endpoints/test_group_hashes.py +++ b/tests/sentry/api/endpoints/test_group_hashes.py @@ -1,4 +1,5 @@ import copy +from unittest.mock import patch from urllib.parse import urlencode from sentry.eventstream.snuba import 
SnubaEventStream @@ -94,7 +95,7 @@ def test_return_multiple_hashes(self): def test_unmerge(self): self.login_as(user=self.user) - group = self.create_group() + group = self.create_group(platform="javascript") hashes = [ GroupHash.objects.create(project=group.project, group=group, hash=hash) @@ -108,5 +109,12 @@ def test_unmerge(self): ] ) - response = self.client.delete(url, format="json") - assert response.status_code == 202, response.content + with patch("sentry.api.endpoints.group_hashes.metrics.incr") as mock_metrics_incr: + response = self.client.delete(url, format="json") + + assert response.status_code == 202, response.content + mock_metrics_incr.assert_any_call( + "grouping.unmerge_issues", + sample_rate=1.0, + tags={"platform": "javascript"}, + ) diff --git a/tests/sentry/api/helpers/test_group_index.py b/tests/sentry/api/helpers/test_group_index.py index fba4ff6b9ba7b0..8553b5ef2419c3 100644 --- a/tests/sentry/api/helpers/test_group_index.py +++ b/tests/sentry/api/helpers/test_group_index.py @@ -1,5 +1,5 @@ from datetime import datetime, timedelta -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch import pytest from django.http import QueryDict @@ -264,6 +264,102 @@ def test_ignore_with_substatus_archived_until_escalating(self, send_robust: Mock assert not GroupInbox.objects.filter(group=group).exists() +class MergeGroupsTest(TestCase): + @patch("sentry.api.helpers.group_index.update.handle_merge") + def test_simple(self, mock_handle_merge: MagicMock): + group_ids = [self.create_group().id, self.create_group().id] + project = self.project + + request = self.make_request(method="PUT") + request.user = self.user + request.data = {"merge": 1} + request.GET = {"id": group_ids, "project": [project.id]} + + update_groups(request, group_ids, [project], self.organization.id, search_fn=Mock()) + + call_args = mock_handle_merge.call_args.args + + assert len(call_args) == 3 + # Have to convert to ids because first argument is a 
queryset + assert [group.id for group in call_args[0]] == group_ids + assert call_args[1] == {project.id: project} + assert call_args[2] == self.user + + @patch("sentry.api.helpers.group_index.update.handle_merge") + def test_multiple_projects(self, mock_handle_merge: MagicMock): + project1 = self.create_project() + project2 = self.create_project() + projects = [project1, project2] + project_ids = [project.id for project in projects] + + group_ids = [ + self.create_group(project1).id, + self.create_group(project2).id, + ] + + request = self.make_request(method="PUT") + request.user = self.user + request.data = {"merge": 1} + request.GET = {"id": group_ids, "project": project_ids} + + response = update_groups( + request, group_ids, projects, self.organization.id, search_fn=Mock() + ) + + assert response.data == {"detail": "Merging across multiple projects is not supported"} + assert mock_handle_merge.call_count == 0 + + def test_metrics(self): + for referer, expected_referer_tag in [ + ("https://sentry.io/organizations/dogsaregreat/issues/", "issue stream"), + ("https://dogsaregreat.sentry.io/issues/", "issue stream"), + ( + "https://sentry.io/organizations/dogsaregreat/issues/12311121/similar/", + "similar issues tab", + ), + ( + "https://dogsaregreat.sentry.io/issues/12311121/similar/", + "similar issues tab", + ), + ( + "https://sentry.io/organizations/dogsaregreat/some/other/path/", + "unknown", + ), + ( + "https://dogsaregreat.sentry.io/some/other/path/", + "unknown", + ), + ( + "", + "unknown", + ), + ]: + + group_ids = [ + self.create_group(platform="javascript").id, + self.create_group(platform="javascript").id, + ] + project = self.project + + request = self.make_request(method="PUT") + request.user = self.user + request.data = {"merge": 1} + request.GET = {"id": group_ids, "project": [project.id]} + request.META = {"HTTP_REFERER": referer} + + with patch("sentry.api.helpers.group_index.update.metrics.incr") as mock_metrics_incr: + update_groups(request, 
group_ids, [project], self.organization.id, search_fn=Mock()) + + mock_metrics_incr.assert_any_call( + "grouping.merge_issues", + sample_rate=1.0, + tags={ + "platform": "javascript", + "referer": expected_referer_tag, + }, + ) + + class TestHandleIsSubscribed(TestCase): def setUp(self) -> None: self.group = self.create_group() From c0c3a28c211590bb4b0dc823f023fe18dc385a74 Mon Sep 17 00:00:00 2001 From: Richard Ortenberg Date: Mon, 17 Jul 2023 13:00:04 -0700 Subject: [PATCH 43/67] fix(crons): Properly checks for environment name lengths (#52820) Safety check for environment name lengths --- .../monitors/consumers/monitor_consumer.py | 8 ++++++++ .../endpoints/monitor_ingest_checkin_index.py | 3 ++- src/sentry/monitors/models.py | 7 +++++++ .../test_monitor_ingest_checkin_index.py | 19 +++++++++++++++++++ .../sentry/monitors/test_monitor_consumer.py | 15 +++++++++++++++ 5 files changed, 51 insertions(+), 1 deletion(-) diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py index a74ce7ae713fcb..4c0928c68ae8d6 100644 --- a/src/sentry/monitors/consumers/monitor_consumer.py +++ b/src/sentry/monitors/consumers/monitor_consumer.py @@ -24,6 +24,7 @@ MonitorCheckIn, MonitorEnvironment, MonitorEnvironmentLimitsExceeded, + MonitorEnvironmentValidationFailed, MonitorLimitsExceeded, MonitorType, ) @@ -283,6 +284,13 @@ def update_existing_check_in( ) logger.debug("monitor environment exceeds limits for monitor: %s", monitor_slug) return + except MonitorEnvironmentValidationFailed: + metrics.incr( + "monitors.checkin.result", + tags={**metric_kwargs, "status": "failed_monitor_environment_name_length"}, + ) + logger.debug("monitor environment name too long: %s %s", monitor_slug, environment) + return status = getattr(CheckInStatus, validated_params["status"].upper()) diff --git a/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py b/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py index 
649b9b522bf9f9..dfaa2274cf68ff 100644 --- a/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py +++ b/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py @@ -28,6 +28,7 @@ MonitorCheckIn, MonitorEnvironment, MonitorEnvironmentLimitsExceeded, + MonitorEnvironmentValidationFailed, MonitorLimitsExceeded, ) from sentry.monitors.serializers import MonitorCheckInSerializerResponse @@ -185,7 +186,7 @@ def post( monitor_environment = MonitorEnvironment.objects.ensure_environment( project, monitor, result.get("environment") ) - except MonitorEnvironmentLimitsExceeded as e: + except (MonitorEnvironmentLimitsExceeded, MonitorEnvironmentValidationFailed) as e: return self.respond({type(e).__name__: str(e)}, status=403) # Infer the original start time of the check-in from the duration. diff --git a/src/sentry/monitors/models.py b/src/sentry/monitors/models.py index e12b4605db4351..8c73406e1f411d 100644 --- a/src/sentry/monitors/models.py +++ b/src/sentry/monitors/models.py @@ -79,6 +79,10 @@ class MonitorEnvironmentLimitsExceeded(Exception): pass +class MonitorEnvironmentValidationFailed(Exception): + pass + + def get_next_schedule(last_checkin, schedule_type, schedule): if schedule_type == ScheduleType.CRONTAB: itr = croniter(schedule, last_checkin) @@ -447,6 +451,9 @@ def ensure_environment( if not environment_name: environment_name = "production" + if not Environment.is_valid_name(environment_name): + raise MonitorEnvironmentValidationFailed("Environment name too long") + # TODO: assume these objects exist once backfill is completed environment = Environment.get_or_create(project=project, name=environment_name) diff --git a/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_index.py b/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_index.py index 589bb36a1931f0..f9db864ed354cd 100644 --- a/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_index.py +++ b/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_index.py @@ 
-359,6 +359,25 @@ def test_monitor_environment_creation_over_limit(self): assert resp.status_code == 403 assert "MonitorEnvironmentLimitsExceeded" in resp.data.keys() + def test_monitor_environment_validation(self): + for i, path_func in enumerate(self._get_path_functions()): + slug = f"my-new-monitor-{i}" + path = path_func(slug) + + invalid_name = "x" * 65 + + resp = self.client.post( + path, + { + "status": "ok", + "monitor_config": {"schedule_type": "crontab", "schedule": "5 * * * *"}, + "environment": f"environment-{invalid_name}", + }, + **self.dsn_auth_headers, + ) + assert resp.status_code == 403 + assert "MonitorEnvironmentValidationFailed" in resp.data.keys() + def test_with_dsn_auth_and_guid(self): for path_func in self._get_path_functions(): monitor = self._create_monitor() diff --git a/tests/sentry/monitors/test_monitor_consumer.py b/tests/sentry/monitors/test_monitor_consumer.py index f0f8ed1df27f35..9ed9685832149d 100644 --- a/tests/sentry/monitors/test_monitor_consumer.py +++ b/tests/sentry/monitors/test_monitor_consumer.py @@ -468,6 +468,21 @@ def test_monitor_environment_limits(self): monitor_environments = MonitorEnvironment.objects.filter(monitor=monitor) assert len(monitor_environments) == settings.MAX_ENVIRONMENTS_PER_MONITOR + def test_monitor_environment_validation(self): + invalid_name = "x" * 65 + + self.send_message( + "my-monitor", + monitor_config={"schedule": {"type": "crontab", "value": "13 * * * *"}}, + environment=f"my-environment-{invalid_name}", + ) + + monitor = Monitor.objects.get(slug="my-monitor") + assert monitor is not None + + monitor_environments = MonitorEnvironment.objects.filter(monitor=monitor) + assert len(monitor_environments) == 0 + def test_organization_killswitch(self): monitor = self._create_monitor(slug="my-monitor") From e558a85278adb274dd937d14405198176551ea78 Mon Sep 17 00:00:00 2001 From: Richard Ortenberg Date: Mon, 17 Jul 2023 13:05:10 -0700 Subject: [PATCH 44/67] ref(crons): Change to uuid validator for 
trace (#52561) --- src/sentry/monitors/validators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/monitors/validators.py b/src/sentry/monitors/validators.py index c6abf14168e3c2..0da7b12880e86c 100644 --- a/src/sentry/monitors/validators.py +++ b/src/sentry/monitors/validators.py @@ -226,7 +226,7 @@ def create(self, validated_data): class TraceContextValidator(serializers.Serializer): - trace_id = serializers.CharField(max_length=32) + trace_id = serializers.UUIDField(format="hex") class ContextsValidator(serializers.Serializer): From 14da39a3150c201fd9816bfbaa961b496ceccf61 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 17 Jul 2023 13:29:36 -0700 Subject: [PATCH 45/67] fix(replays): fix bin/mock-replays script (#52927) Closes getsentry/team-replay#115 bin/mock-replays script now runs successfully locally so you can see an empty replay in your local environment upon running `sentry devserver` --- bin/mock-replay | 68 +++++++++++++++++++++++++++++++++++++------------ 1 file changed, 52 insertions(+), 16 deletions(-) diff --git a/bin/mock-replay b/bin/mock-replay index 8f8dcd8bd0b7a9..799d6fab91219d 100755 --- a/bin/mock-replay +++ b/bin/mock-replay @@ -1,18 +1,29 @@ #!/usr/bin/env python +from io import BytesIO +from zlib import compress + from sentry.runner import configure +from sentry.utils.json import dumps_htmlsafe configure() -import datetime import pathlib import uuid +from datetime import datetime, timedelta import click import requests from django.conf import settings -from sentry.models import File, Organization, Project +from sentry.models import File, Organization, Project, Team from sentry.replays.models import ReplayRecordingSegment -from sentry.replays.testutils import mock_replay +from sentry.replays.testutils import ( + mock_replay, + mock_rrweb_div_helloworld, + mock_segment_console, + mock_segment_fullsnapshot, + mock_segment_init, + 
mock_segment_nagivation, +) def store_replay(replay): @@ -22,16 +33,28 @@ def store_replay(replay): assert response.status_code == 200 -def create_recording_segment(replay_id, project_id, filename, segment_id): - with open(filename, "rb") as f: - file = File.objects.create(name=filename, type="application/octet-stream") - file.putfile(f) +def create_recording(replay_id, project_id, timestamp): + segments = [ + mock_segment_init(timestamp), + mock_segment_fullsnapshot(timestamp, [mock_rrweb_div_helloworld()]), + mock_segment_console(timestamp), + mock_segment_nagivation(timestamp + timedelta(seconds=1), hrefFrom="/", hrefTo="/home/"), + mock_segment_nagivation( + timestamp + timedelta(seconds=2), hrefFrom="/home/", hrefTo="/profile/" + ), + ] + for (segment_id, segment) in enumerate(segments): + store_replay_segments(replay_id, project_id, segment_id, segment) + +def store_replay_segments(replay_id: str, project_id: str, segment_id: int, segment): + f = File.objects.create(name="rr:{segment_id}", type="replay.recording") + f.putfile(BytesIO(compress(dumps_htmlsafe(segment).encode()))) ReplayRecordingSegment.objects.create( - replay_id=replay_id.replace("-", ""), + replay_id=replay_id, project_id=project_id, segment_id=segment_id, - file_id=file.id, + file_id=f.id, ) @@ -41,7 +64,13 @@ def make_filename(filename: str) -> str: def main(): - project_name = "Replay Test" + project_name = "Replay Test Project" + + if not settings.SENTRY_FEATURES["organizations:session-replay"]: + click.echo( + 'Session Replays is currently turned off! 
\nTo enable, add the following line to your local sentry.conf.py file: \nSENTRY_FEATURES["organizations:session-replay"] = True' + ) + exit() if settings.SENTRY_SINGLE_ORGANIZATION: org = Organization.get_default() @@ -51,25 +80,32 @@ def main(): org, _ = Organization.objects.get_or_create(slug="default") click.echo(f" > Mocking project {project_name}") + + team, _ = Team.objects.get_or_create( + organization=org, slug="sentry", defaults={"name": "Sentry"} + ) + project, _ = Project.objects.get_or_create( name=project_name, defaults={ "organization": org, "flags": Project.flags.has_replays, }, + platform="javascript", ) + project.add_team(team) + replay_id = uuid.uuid4().hex - seq1_timestamp = datetime.datetime.now() - datetime.timedelta(seconds=22) - seq2_timestamp = datetime.datetime.now() - datetime.timedelta(seconds=5) + seq1_timestamp = datetime.now() - timedelta(seconds=22) + seq2_timestamp = datetime.now() - timedelta(seconds=5) - click.echo("Creating Clickhouse entries...") + click.echo("Creating Replay events entries...") store_replay(mock_replay(seq1_timestamp, project.id, replay_id, segment_id=0)) store_replay(mock_replay(seq2_timestamp, project.id, replay_id, segment_id=1)) - click.echo("Creating Postgres entries...") - create_recording_segment(replay_id, project.id, make_filename("rrweb-1658770770892.json"), 0) - create_recording_segment(replay_id, project.id, make_filename("rrweb-1658770772903.json"), 1) + click.echo("Creating Replay recording entries...") + create_recording(replay_id, project.id, seq1_timestamp) if __name__ == "__main__": From ab1800732433938b5dcecb769557e2dd2367c586 Mon Sep 17 00:00:00 2001 From: Alex Zaslavsky Date: Mon, 17 Jul 2023 13:39:24 -0700 Subject: [PATCH 46/67] test(backup): Add more model tests (#52923) Add a number of tests for the exportable Organization*, Project*, User*, and Team models. 
Issue: getsentry/team-ospo#156 --- tests/sentry/backup/test_models.py | 99 +++++++++++++++++++++++++++++- 1 file changed, 97 insertions(+), 2 deletions(-) diff --git a/tests/sentry/backup/test_models.py b/tests/sentry/backup/test_models.py index 3e657b89369b6c..18f2fa21a1f13e 100644 --- a/tests/sentry/backup/test_models.py +++ b/tests/sentry/backup/test_models.py @@ -1,11 +1,13 @@ from __future__ import annotations import tempfile +from datetime import datetime from pathlib import Path from typing import Type from click.testing import CliRunner from django.core.management import call_command +from django.utils import timezone from sentry.incidents.models import ( AlertRule, @@ -21,8 +23,26 @@ DashboardWidgetQuery, DashboardWidgetTypes, ) -from sentry.models.environment import Environment +from sentry.models.environment import Environment, EnvironmentProject +from sentry.models.options.project_option import ProjectOption +from sentry.models.options.user_option import UserOption from sentry.models.organization import Organization +from sentry.models.organizationaccessrequest import OrganizationAccessRequest +from sentry.models.organizationmapping import OrganizationMapping +from sentry.models.organizationmember import OrganizationMember +from sentry.models.organizationmemberteam import OrganizationMemberTeam +from sentry.models.project import Project +from sentry.models.projectbookmark import ProjectBookmark +from sentry.models.projectkey import ProjectKey +from sentry.models.projectownership import ProjectOwnership +from sentry.models.projectredirect import ProjectRedirect +from sentry.models.projectteam import ProjectTeam +from sentry.models.team import Team +from sentry.models.user import User +from sentry.models.useremail import UserEmail +from sentry.models.userip import UserIP +from sentry.models.userpermission import UserPermission +from sentry.models.userrole import UserRole, UserRoleUser from sentry.monitors.models import Monitor, MonitorEnvironment, 
MonitorType, ScheduleType from sentry.runner.commands.backup import import_, validate from sentry.silo import unguarded_write @@ -169,6 +189,13 @@ def test_environment(self): self.create_environment() return self.import_export_then_validate() + @targets_models(EnvironmentProject) + def test_environment_project(self): + env = self.create_environment() + project = self.create_project() + EnvironmentProject.objects.create(project=project, environment=env, is_hidden=False) + return self.import_export_then_validate() + @targets_models(Monitor) def test_monitor(self): self.create_monitor() @@ -184,8 +211,76 @@ def test_monitor_environment(self): ) return self.import_export_then_validate() - @targets_models(Organization) + @targets_models(Organization, OrganizationMapping) def test_organization(self): user = self.create_user() self.create_organization(owner=user) return self.import_export_then_validate() + + @targets_models(OrganizationAccessRequest, OrganizationMember, OrganizationMemberTeam, Team) + def test_organization_membership(self): + organization = self.create_organization(name="test_org", owner=self.user) + user = self.create_user("other@example.com") + member = self.create_member(organization=organization, user=user, role="member") + team = self.create_team(name="foo", organization=organization) + + self.create_team_membership(user=user, team=team) + OrganizationAccessRequest.objects.create(member=member, team=team) + return self.import_export_then_validate() + + @targets_models(Project, ProjectKey, ProjectOption, ProjectTeam) + def test_project(self): + self.create_project() + return self.import_export_then_validate() + + @targets_models(ProjectBookmark) + def test_project_bookmark(self): + user = self.create_user() + project = self.create_project() + self.create_project_bookmark(project=project, user=user) + return self.import_export_then_validate() + + @targets_models(ProjectKey) + def test_project_key(self): + project = self.create_project() + 
self.create_project_key(project) + return self.import_export_then_validate() + + @targets_models(ProjectOwnership) + def test_project_ownership(self): + project = self.create_project() + ProjectOwnership.objects.create( + project=project, raw='{"hello":"hello"}', schema={"hello": "hello"} + ) + return self.import_export_then_validate() + + @targets_models(ProjectRedirect) + def test_project_redirect(self): + project = self.create_project() + ProjectRedirect.record(project, "old_slug") + return self.import_export_then_validate() + + @targets_models(User, UserEmail, UserOption, UserPermission) + def test_user(self): + user = self.create_user() + self.add_user_permission(user, "users.admin") + UserOption.objects.create(user=user, key="timezone", value="Europe/Vienna") + return self.import_export_then_validate() + + @targets_models(UserIP) + def test_user_ip(self): + user = self.create_user() + UserIP.objects.create( + user=user, + ip_address="127.0.0.2", + first_seen=datetime(2012, 4, 5, 3, 29, 45, tzinfo=timezone.utc), + last_seen=datetime(2012, 4, 5, 3, 29, 45, tzinfo=timezone.utc), + ) + return self.import_export_then_validate() + + @targets_models(UserRole, UserRoleUser) + def test_user_role(self): + user = self.create_user() + role = UserRole.objects.create(name="test-role") + UserRoleUser.objects.create(user=user, role=role) + return self.import_export_then_validate() From e21c9ad6416c5e1f6edb0ebc14eb95eba17cf05e Mon Sep 17 00:00:00 2001 From: Armen Zambrano G <44410+armenzg@users.noreply.github.com> Date: Mon, 17 Jul 2023 16:46:44 -0400 Subject: [PATCH 47/67] ref(event_manager): Fix typing issues for event_manager (#52974) We want to make a lot of changes to event_manager and we need to have backend typing in place for the upcoming work. 
Fixes https://github.com/getsentry/sentry/issues/52877 --------- Co-authored-by: Armen Zambrano G --- pyproject.toml | 2 -- src/sentry/attachments/__init__.py | 2 +- src/sentry/event_manager.py | 41 ++++++++----------------- src/sentry/eventtypes/__init__.py | 12 ++++++++ src/sentry/grouping/result.py | 8 ++--- src/sentry/tsdb/__init__.py | 5 +-- tests/sentry/eventtypes/test_default.py | 2 +- 7 files changed, 33 insertions(+), 39 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bfae3de90ebebe..17aba86dc76867 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -442,7 +442,6 @@ module = [ "sentry.discover.endpoints.discover_key_transactions", "sentry.discover.endpoints.serializers", "sentry.discover.tasks", - "sentry.event_manager", "sentry.eventstore.base", "sentry.eventstore.compressor", "sentry.eventstore.models", @@ -1156,7 +1155,6 @@ module = [ "tests.sentry.eventstore.test_models", "tests.sentry.eventstream.kafka.test_protocol", "tests.sentry.eventstream.test_eventstream", - "tests.sentry.eventtypes.test_default", "tests.sentry.eventtypes.test_error", "tests.sentry.features.test_manager", "tests.sentry.grouping.test_enhancer", diff --git a/src/sentry/attachments/__init__.py b/src/sentry/attachments/__init__.py index 64ad58c27512e4..e8ebbcc3cd055a 100644 --- a/src/sentry/attachments/__init__.py +++ b/src/sentry/attachments/__init__.py @@ -1,4 +1,4 @@ -__all__ = ["attachment_cache", "CachedAttachment"] +__all__ = ["attachment_cache", "CachedAttachment", "MissingAttachmentChunks"] from django.conf import settings diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 67d7f198d16f63..b55272f355daf4 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -19,7 +19,6 @@ Optional, Sequence, Tuple, - Type, TypedDict, Union, cast, @@ -55,15 +54,8 @@ from sentry.culprit import generate_culprit from sentry.dynamic_sampling import LatestReleaseBias, LatestReleaseParams from sentry.eventstore.processing import 
event_processing_store -from sentry.eventtypes import ( - CspEvent, - DefaultEvent, - ErrorEvent, - ExpectCTEvent, - ExpectStapleEvent, - HpkpEvent, - TransactionEvent, -) +from sentry.eventtypes import EventType +from sentry.eventtypes.transaction import TransactionEvent from sentry.grouping.api import ( BackgroundGroupingConfigLoader, GroupingConfig, @@ -660,7 +652,7 @@ def save_error_events( with metrics.timer("event_manager.save_attachments"): save_attachments(cache_key, attachments, job) - metric_tags = {"from_relay": "_relay_processed" in job["data"]} + metric_tags = {"from_relay": str("_relay_processed" in job["data"])} metrics.timing( "events.latency", @@ -1260,13 +1252,15 @@ def _tsdb_record_all_metrics(jobs: Sequence[Job]) -> None: records.append((TSDBModel.users_affected_by_project, project_id, (user.tag_value,))) if incrs: - tsdb.incr_multi(incrs, timestamp=event.datetime, environment_id=environment.id) + tsdb.backend.incr_multi(incrs, timestamp=event.datetime, environment_id=environment.id) if records: - tsdb.record_multi(records, timestamp=event.datetime, environment_id=environment.id) + tsdb.backend.record_multi( + records, timestamp=event.datetime, environment_id=environment.id + ) if frequencies: - tsdb.record_frequency_multi(frequencies, timestamp=event.datetime) + tsdb.backend.record_frequency_multi(frequencies, timestamp=event.datetime) @metrics.wraps("save_event.nodestore_save_many") @@ -1438,17 +1432,6 @@ def _get_event_user_impl( return euser -EventType = Union[ - DefaultEvent, - ErrorEvent, - CspEvent, - HpkpEvent, - ExpectCTEvent, - ExpectStapleEvent, - TransactionEvent, -] - - def get_event_type(data: Mapping[str, Any]) -> EventType: return eventtypes.get(data.get("type", "default"))() @@ -1922,7 +1905,7 @@ def _process_existing_aggregate( return bool(is_regression) -Attachment = Type[CachedAttachment] +Attachment = CachedAttachment def discard_event(job: Job, attachments: Sequence[Attachment]) -> None: @@ -1938,7 +1921,7 @@ def 
discard_event(job: Job, attachments: Sequence[Attachment]) -> None: project = job["event"].project - quotas.refund( + quotas.backend.refund( project, key=job["project_key"], timestamp=job["start_time"], @@ -1975,7 +1958,7 @@ def discard_event(job: Job, attachments: Sequence[Attachment]) -> None: ) if attachment_quantity: - quotas.refund( + quotas.backend.refund( project, key=job["project_key"], timestamp=job["start_time"], @@ -2099,7 +2082,7 @@ def filter_attachments_for_group(attachments: list[Attachment], job: Job) -> lis cache.set(crashreports_key, max_crashreports, CRASH_REPORT_TIMEOUT) if refund_quantity: - quotas.refund( + quotas.backend.refund( project, key=job["project_key"], timestamp=job["start_time"], diff --git a/src/sentry/eventtypes/__init__.py b/src/sentry/eventtypes/__init__.py index 143f2e28eb93f2..c370aa787f3da2 100644 --- a/src/sentry/eventtypes/__init__.py +++ b/src/sentry/eventtypes/__init__.py @@ -1,3 +1,5 @@ +from typing import Union + from .base import DefaultEvent from .error import ErrorEvent from .generic import GenericEvent @@ -17,3 +19,13 @@ get = default_manager.get register = default_manager.register + +EventType = Union[ + DefaultEvent, + ErrorEvent, + CspEvent, + HpkpEvent, + ExpectCTEvent, + ExpectStapleEvent, + TransactionEvent, +] diff --git a/src/sentry/grouping/result.py b/src/sentry/grouping/result.py index 38fcfafac5a0f9..976545c46d2eb5 100644 --- a/src/sentry/grouping/result.py +++ b/src/sentry/grouping/result.py @@ -1,9 +1,9 @@ from dataclasses import dataclass from typing import Any, Dict, List, Optional, Sequence, TypedDict, Union +from sentry.db.models import NodeData from sentry.utils.safe import get_path, safe_execute, set_path -EventData = Dict[str, Any] EventMetadata = Dict[str, Any] @@ -63,7 +63,7 @@ def _strip_tree_label(tree_label: TreeLabel, truncate: bool = False) -> Stripped return rv -def _write_tree_labels(tree_labels: Sequence[Optional[TreeLabel]], event_data: EventData) -> None: +def 
_write_tree_labels(tree_labels: Sequence[Optional[TreeLabel]], event_data: NodeData) -> None: event_labels: List[Optional[StrippedTreeLabel]] = [] event_data["hierarchical_tree_labels"] = event_labels @@ -97,7 +97,7 @@ class CalculatedHashes: hierarchical_hashes: Sequence[str] tree_labels: Sequence[Optional[TreeLabel]] - def write_to_event(self, event_data: EventData) -> None: + def write_to_event(self, event_data: NodeData) -> None: event_data["hashes"] = self.hashes if self.hierarchical_hashes: @@ -106,7 +106,7 @@ def write_to_event(self, event_data: EventData) -> None: safe_execute(_write_tree_labels, self.tree_labels, event_data, _with_transaction=False) @classmethod - def from_event(cls, event_data: EventData) -> Optional["CalculatedHashes"]: + def from_event(cls, event_data: NodeData) -> Optional["CalculatedHashes"]: hashes = event_data.get("hashes") hierarchical_hashes = event_data.get("hierarchical_hashes") or [] tree_labels = event_data.get("hierarchical_tree_labels") or [] diff --git a/src/sentry/tsdb/__init__.py b/src/sentry/tsdb/__init__.py index 1aa88203fec695..847cf864aa0f7a 100644 --- a/src/sentry/tsdb/__init__.py +++ b/src/sentry/tsdb/__init__.py @@ -5,6 +5,7 @@ from .base import BaseTSDB from .dummy import DummyTSDB -LazyServiceWrapper( +backend = LazyServiceWrapper( BaseTSDB, settings.SENTRY_TSDB, settings.SENTRY_TSDB_OPTIONS, dangerous=[DummyTSDB] -).expose(locals()) +) +backend.expose(locals()) diff --git a/tests/sentry/eventtypes/test_default.py b/tests/sentry/eventtypes/test_default.py index 5c9443de01af85..de36b5c3820f72 100644 --- a/tests/sentry/eventtypes/test_default.py +++ b/tests/sentry/eventtypes/test_default.py @@ -1,4 +1,4 @@ -from sentry.eventtypes import DefaultEvent +from sentry.eventtypes.base import DefaultEvent from sentry.testutils import TestCase from sentry.testutils.silo import region_silo_test From 1dd34767bef788dd61eb08daf9047877cc187bcf Mon Sep 17 00:00:00 2001 From: Richard Ortenberg Date: Mon, 17 Jul 2023 13:54:18 -0700 
Subject: [PATCH 48/67] feat(crons): Use timeout_at for timed out check-ins (#52570) Queries off of timestamp of `timeout_at` for in progress check-ins --- src/sentry/monitors/tasks.py | 26 +++++++++++++------------- tests/sentry/monitors/test_tasks.py | 8 ++++++++ 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/src/sentry/monitors/tasks.py b/src/sentry/monitors/tasks.py index 49972e3923baf4..749d78052c4a81 100644 --- a/src/sentry/monitors/tasks.py +++ b/src/sentry/monitors/tasks.py @@ -1,5 +1,4 @@ import logging -from datetime import timedelta from django.utils import timezone @@ -102,21 +101,13 @@ def check_monitors(current_datetime=None): except Exception: logger.exception("Exception in check_monitors - mark missed") - qs = MonitorCheckIn.objects.filter(status=CheckInStatus.IN_PROGRESS).select_related( - "monitor", "monitor_environment" - )[:CHECKINS_LIMIT] + qs = MonitorCheckIn.objects.filter( + status=CheckInStatus.IN_PROGRESS, timeout_at__lte=current_datetime + ).select_related("monitor", "monitor_environment")[:CHECKINS_LIMIT] metrics.gauge("sentry.monitors.tasks.check_monitors.timeout_count", qs.count()) # check for any monitors which are still running and have exceeded their maximum runtime for checkin in qs: try: - timeout = timedelta( - minutes=(checkin.monitor.config or {}).get("max_runtime") or TIMEOUT - ) - # Check against date_updated to allow monitors to run for longer as - # long as they continue to send heart beats updating the checkin - if checkin.date_updated > current_datetime - timeout: - continue - monitor_environment = checkin.monitor_environment logger.info( "monitor_environment.checkin-timeout", @@ -136,7 +127,16 @@ def check_monitors(current_datetime=None): if not has_newer_result: monitor_environment.mark_failed( reason=MonitorFailure.DURATION, - occurrence_context={"duration": (timeout.seconds // 60) % 60}, + occurrence_context={ + "duration": (checkin.monitor.config or {}).get("max_runtime") or TIMEOUT + }, ) except 
Exception: logger.exception("Exception in check_monitors - mark timeout") + + # safety check for check-ins stuck in the backlog + backlog_count = MonitorCheckIn.objects.filter( + status=CheckInStatus.IN_PROGRESS, timeout_at__isnull=True + ).count() + if backlog_count: + logger.exception(f"Exception in check_monitors - backlog count {backlog_count} is > 0") diff --git a/tests/sentry/monitors/test_tasks.py b/tests/sentry/monitors/test_tasks.py index 35fe8923463190..f5ef5113610135 100644 --- a/tests/sentry/monitors/test_tasks.py +++ b/tests/sentry/monitors/test_tasks.py @@ -206,6 +206,7 @@ def test_timeout_with_no_future_complete_checkin(self): status=CheckInStatus.IN_PROGRESS, date_added=check_in_24hr_ago, date_updated=check_in_24hr_ago, + timeout_at=check_in_24hr_ago + timedelta(minutes=30), ) # We started another checkin right now checkin2 = MonitorCheckIn.objects.create( @@ -215,6 +216,7 @@ def test_timeout_with_no_future_complete_checkin(self): status=CheckInStatus.IN_PROGRESS, date_added=next_checkin_ts, date_updated=next_checkin_ts, + timeout_at=next_checkin_ts + timedelta(minutes=30), ) assert checkin1.date_added == checkin1.date_updated == check_in_24hr_ago @@ -268,6 +270,7 @@ def test_timeout_with_future_complete_checkin(self): status=CheckInStatus.IN_PROGRESS, date_added=check_in_24hr_ago, date_updated=check_in_24hr_ago, + timeout_at=check_in_24hr_ago + timedelta(minutes=30), ) checkin2 = MonitorCheckIn.objects.create( monitor=monitor, @@ -276,6 +279,7 @@ def test_timeout_with_future_complete_checkin(self): status=CheckInStatus.OK, date_added=next_checkin_ts, date_updated=next_checkin_ts, + timeout_at=next_checkin_ts + timedelta(minutes=30), ) assert checkin1.date_added == checkin1.date_updated == check_in_24hr_ago @@ -321,6 +325,7 @@ def test_timeout_via_max_runtime_configuration(self): status=CheckInStatus.IN_PROGRESS, date_added=next_checkin_ts, date_updated=next_checkin_ts, + timeout_at=next_checkin_ts + timedelta(minutes=60), ) assert 
checkin.date_added == checkin.date_updated == next_checkin_ts @@ -424,6 +429,7 @@ def test_timeout_exception_handling(self, logger): status=CheckInStatus.IN_PROGRESS, date_added=check_in_24hr_ago, date_updated=check_in_24hr_ago, + timeout_at=check_in_24hr_ago + timedelta(minutes=30), ) # This monitor will be fine @@ -448,6 +454,7 @@ def test_timeout_exception_handling(self, logger): status=CheckInStatus.IN_PROGRESS, date_added=check_in_24hr_ago, date_updated=check_in_24hr_ago, + timeout_at=check_in_24hr_ago + timedelta(minutes=30), ) checkin2 = MonitorCheckIn.objects.create( monitor=monitor, @@ -456,6 +463,7 @@ def test_timeout_exception_handling(self, logger): status=CheckInStatus.IN_PROGRESS, date_added=next_checkin_ts, date_updated=next_checkin_ts, + timeout_at=next_checkin_ts + timedelta(minutes=30), ) assert checkin1.date_added == checkin1.date_updated == check_in_24hr_ago From 316f77a55b79a35cb202f83dcd1602e4f82fe4ae Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Mon, 17 Jul 2023 14:11:22 -0700 Subject: [PATCH 49/67] ref(sort): Change betterPriority to priority (#52910) Change references to the new priority sort to just "priority" for simplicity and to prevent more saved searches being created with the sort string "betterPriority". After this is merged we'll run a migration to make all saved searches for priority sort the same (https://github.com/getsentry/sentry/pull/52909) and then finish by cleaning up the backend to only accept "priority" (right now it treats betterPriority and priority the same". 
Step 1: This PR Step 2: Migration https://github.com/getsentry/sentry/pull/52909 Step 3: Backend PR https://github.com/getsentry/sentry/pull/52915 --- .../modals/savedSearchModal/createSavedSearchModal.spec.tsx | 2 +- .../modals/savedSearchModal/createSavedSearchModal.tsx | 2 +- .../modals/savedSearchModal/editSavedSearchModal.spec.tsx | 4 ++-- .../modals/savedSearchModal/savedSearchModalContent.tsx | 2 +- static/app/views/dashboards/datasetConfig/issues.tsx | 2 +- .../app/views/dashboards/widgetBuilder/issueWidget/utils.tsx | 2 +- static/app/views/issueList/actions/sortOptions.tsx | 4 ++-- static/app/views/issueList/utils.tsx | 4 ++-- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx b/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx index e0f804699a26cb..7328cbb2a6a769 100644 --- a/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx +++ b/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx @@ -92,7 +92,7 @@ describe('CreateSavedSearchModal', function () { data: { name: 'new search name', query: 'is:resolved', - sort: IssueSortOptions.BETTER_PRIORITY, + sort: IssueSortOptions.PRIORITY, type: 0, visibility: SavedSearchVisibility.OWNER, }, diff --git a/static/app/components/modals/savedSearchModal/createSavedSearchModal.tsx b/static/app/components/modals/savedSearchModal/createSavedSearchModal.tsx index d937271b4874e4..294e65b4aa6b60 100644 --- a/static/app/components/modals/savedSearchModal/createSavedSearchModal.tsx +++ b/static/app/components/modals/savedSearchModal/createSavedSearchModal.tsx @@ -22,7 +22,7 @@ function validateSortOption({sort}: {sort?: string}) { const sortOptions = [ IssueSortOptions.DATE, IssueSortOptions.NEW, - IssueSortOptions.BETTER_PRIORITY, + IssueSortOptions.PRIORITY, IssueSortOptions.FREQ, IssueSortOptions.USER, ]; diff --git 
a/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx b/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx index 1fa8a931135ef4..ead6c9b951f136 100644 --- a/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx +++ b/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx @@ -56,7 +56,7 @@ describe('EditSavedSearchModal', function () { id: 'saved-search-id', name: 'test', query: 'is:unresolved browser:firefox', - sort: IssueSortOptions.BETTER_PRIORITY, + sort: IssueSortOptions.PRIORITY, visibility: SavedSearchVisibility.OWNER, }, }); @@ -97,7 +97,7 @@ describe('EditSavedSearchModal', function () { id: 'saved-search-id', name: 'test', query: 'is:unresolved browser:firefox', - sort: IssueSortOptions.BETTER_PRIORITY, + sort: IssueSortOptions.PRIORITY, visibility: SavedSearchVisibility.OWNER, }, }); diff --git a/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx b/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx index bece068e03fe3b..1cc96817abc312 100644 --- a/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx +++ b/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx @@ -23,7 +23,7 @@ export function SavedSearchModalContent({organization}: SavedSearchModalContentP const sortOptions = [ IssueSortOptions.DATE, IssueSortOptions.NEW, - IssueSortOptions.BETTER_PRIORITY, + IssueSortOptions.PRIORITY, IssueSortOptions.FREQ, IssueSortOptions.USER, ]; diff --git a/static/app/views/dashboards/datasetConfig/issues.tsx b/static/app/views/dashboards/datasetConfig/issues.tsx index 0b07df6c939acb..8a70c42018be51 100644 --- a/static/app/views/dashboards/datasetConfig/issues.tsx +++ b/static/app/views/dashboards/datasetConfig/issues.tsx @@ -73,7 +73,7 @@ function getTableSortOptions(_organization: Organization, _widgetQuery: WidgetQu const sortOptions = [ IssueSortOptions.DATE, IssueSortOptions.NEW, - 
IssueSortOptions.BETTER_PRIORITY, + IssueSortOptions.PRIORITY, IssueSortOptions.FREQ, IssueSortOptions.USER, ]; diff --git a/static/app/views/dashboards/widgetBuilder/issueWidget/utils.tsx b/static/app/views/dashboards/widgetBuilder/issueWidget/utils.tsx index 27fcd5e91ce376..d18489c304cd13 100644 --- a/static/app/views/dashboards/widgetBuilder/issueWidget/utils.tsx +++ b/static/app/views/dashboards/widgetBuilder/issueWidget/utils.tsx @@ -30,7 +30,7 @@ export const ISSUE_WIDGET_SORT_OPTIONS = [ IssueSortOptions.DATE, IssueSortOptions.NEW, IssueSortOptions.FREQ, - IssueSortOptions.BETTER_PRIORITY, + IssueSortOptions.PRIORITY, IssueSortOptions.USER, ]; diff --git a/static/app/views/issueList/actions/sortOptions.tsx b/static/app/views/issueList/actions/sortOptions.tsx index d5400b2df627f6..7d760e1ca466c3 100644 --- a/static/app/views/issueList/actions/sortOptions.tsx +++ b/static/app/views/issueList/actions/sortOptions.tsx @@ -19,7 +19,7 @@ function getSortTooltip(key: IssueSortOptions) { return t('When issue was flagged for review.'); case IssueSortOptions.NEW: return t('First time the issue occurred.'); - case IssueSortOptions.BETTER_PRIORITY: + case IssueSortOptions.PRIORITY: return t('Recent issues trending upward.'); case IssueSortOptions.FREQ: return t('Number of events.'); @@ -37,7 +37,7 @@ function IssueListSortOptions({onSelect, sort, query}: Props) { ...(FOR_REVIEW_QUERIES.includes(query || '') ? 
[IssueSortOptions.INBOX] : []), IssueSortOptions.DATE, IssueSortOptions.NEW, - IssueSortOptions.BETTER_PRIORITY, + IssueSortOptions.PRIORITY, IssueSortOptions.FREQ, IssueSortOptions.USER, ]; diff --git a/static/app/views/issueList/utils.tsx b/static/app/views/issueList/utils.tsx index cb855b7dca0e66..a56e35d22aa20d 100644 --- a/static/app/views/issueList/utils.tsx +++ b/static/app/views/issueList/utils.tsx @@ -174,7 +174,7 @@ export type QueryCounts = Partial>; export enum IssueSortOptions { DATE = 'date', NEW = 'new', - BETTER_PRIORITY = 'betterPriority', + PRIORITY = 'priority', FREQ = 'freq', USER = 'user', INBOX = 'inbox', @@ -190,7 +190,7 @@ export function getSortLabel(key: string) { switch (key) { case IssueSortOptions.NEW: return t('First Seen'); - case IssueSortOptions.BETTER_PRIORITY: + case IssueSortOptions.PRIORITY: return t('Priority'); case IssueSortOptions.FREQ: return t('Events'); From f24349234fad6aa31f9bbf8b67f52d243566061b Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Mon, 17 Jul 2023 14:31:00 -0700 Subject: [PATCH 50/67] ref(saved search): Migrate betterPriority to priority (#52909) Now that we've GA'd the new priority sort, we should clean up the saved searches that were made with the sort "betterPriority". Step 1: Front end PR to stop saving searches w/ betterPriority https://github.com/getsentry/sentry/pull/52910 Step 2: This PR Step 3: Backend PR https://github.com/getsentry/sentry/pull/52915 --- migrations_lockfile.txt | 2 +- .../0514_migrate_priority_saved_searches.py | 40 +++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 src/sentry/migrations/0514_migrate_priority_saved_searches.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index 733c4108534636..86f67243fe509e 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -6,5 +6,5 @@ To resolve this, rebase against latest master and regenerate your migration. 
Thi will then be regenerated, and you should be able to merge without conflicts. nodestore: 0002_nodestore_no_dictfield -sentry: 0513_django_jsonfield +sentry: 0514_migrate_priority_saved_searches social_auth: 0001_initial diff --git a/src/sentry/migrations/0514_migrate_priority_saved_searches.py b/src/sentry/migrations/0514_migrate_priority_saved_searches.py new file mode 100644 index 00000000000000..65aa8ef93cd4eb --- /dev/null +++ b/src/sentry/migrations/0514_migrate_priority_saved_searches.py @@ -0,0 +1,40 @@ +# Generated by Django 3.2.20 on 2023-07-14 19:44 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.utils.query import RangeQuerySetWrapperWithProgressBar + + +def migrate_saved_searches(apps, schema_editor): + SavedSearch = apps.get_model("sentry", "SavedSearch") + for search in RangeQuerySetWrapperWithProgressBar(SavedSearch.objects.all()): + if search.sort == "betterPriority": + search.sort = "priority" + search.save() + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. 
+ is_dangerous = False + + dependencies = [ + ("sentry", "0513_django_jsonfield"), + ] + + operations = [ + migrations.RunPython( + migrate_saved_searches, + migrations.RunPython.noop, + hints={"tables": ["sentry_savedsearch"]}, + ), + ] From 3d67d3aa6ae76903cb90caf58971a3eca81516cb Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Mon, 17 Jul 2023 14:59:11 -0700 Subject: [PATCH 51/67] test(hybrid-cloud): Stabilizes sentry app install tests (#53004) --- src/sentry/models/organizationmember.py | 4 ++-- src/sentry/models/user.py | 10 +++++----- src/sentry/services/hybrid_cloud/organization/impl.py | 4 ++-- ...est_organization_sentry_app_installation_details.py | 2 +- .../sentry/deletions/test_sentry_app_installations.py | 2 +- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/sentry/models/organizationmember.py b/src/sentry/models/organizationmember.py index c67bcd6d5c0df5..86c8761bd7a0b1 100644 --- a/src/sentry/models/organizationmember.py +++ b/src/sentry/models/organizationmember.py @@ -251,7 +251,7 @@ class Meta: __org_roles_from_teams = None def delete(self, *args, **kwds): - with outbox_context(transaction.atomic()): + with outbox_context(transaction.atomic(using=router.db_for_write(OrganizationMember))): self.save_outbox_for_update() return super().delete(*args, **kwds) @@ -587,7 +587,7 @@ def approve_member_invitation( from sentry import audit_log from sentry.utils.audit import create_audit_entry_from_user - with transaction.atomic(): + with transaction.atomic(using=router.db_for_write(OrganizationMember)): self.approve_invite() self.save() diff --git a/src/sentry/models/user.py b/src/sentry/models/user.py index 6394bbdf958342..e3736974c9629f 100644 --- a/src/sentry/models/user.py +++ b/src/sentry/models/user.py @@ -5,7 +5,7 @@ from django.contrib.auth.models import AbstractBaseUser from django.contrib.auth.models import UserManager as DjangoUserManager from django.contrib.auth.signals import user_logged_out -from django.db import 
IntegrityError, models, transaction +from django.db import IntegrityError, models, router, transaction from django.db.models import Count, Subquery from django.db.models.query import QuerySet from django.dispatch import receiver @@ -162,7 +162,7 @@ def class_name(self): def delete(self): if self.username == "sentry": raise Exception('You cannot delete the "sentry" user as it is required by Sentry.') - with outbox_context(transaction.atomic(), flush=False): + with outbox_context(transaction.atomic(using=router.db_for_write(User)), flush=False): avatar = self.avatar.first() if avatar: avatar.delete() @@ -171,13 +171,13 @@ def delete(self): return super().delete() def update(self, *args, **kwds): - with outbox_context(transaction.atomic(), flush=False): + with outbox_context(transaction.atomic(using=router.db_for_write(User)), flush=False): for outbox in self.outboxes_for_update(): outbox.save() return super().update(*args, **kwds) def save(self, *args, **kwargs): - with outbox_context(transaction.atomic(), flush=False): + with outbox_context(transaction.atomic(using=router.db_for_write(User)), flush=False): if not self.username: self.username = self.email result = super().save(*args, **kwargs) @@ -323,7 +323,7 @@ def merge_to(from_user, to_user): for model in model_list: for obj in model.objects.filter(user_id=from_user.id): try: - with transaction.atomic(): + with transaction.atomic(using=router.db_for_write(User)): obj.update(user_id=to_user.id) except IntegrityError: pass diff --git a/src/sentry/services/hybrid_cloud/organization/impl.py b/src/sentry/services/hybrid_cloud/organization/impl.py index bb451a9017e956..091d99337ee3c6 100644 --- a/src/sentry/services/hybrid_cloud/organization/impl.py +++ b/src/sentry/services/hybrid_cloud/organization/impl.py @@ -2,7 +2,7 @@ from typing import Any, Iterable, List, Mapping, Optional, Set, Union, cast -from django.db import IntegrityError, models, transaction +from django.db import IntegrityError, models, router, 
transaction from django.dispatch import Signal from sentry import roles @@ -491,7 +491,7 @@ def update_region_user(self, *, user: RpcRegionUser, region_name: str) -> None: # Normally, calling update on a QS for organization member fails because we need to ensure that updates to # OrganizationMember objects produces outboxes. In this case, it is safe to do the update directly because # the attribute we are changing never needs to produce an outbox. - with unguarded_write(): + with unguarded_write(using=router.db_for_write(OrganizationMember)): OrganizationMember.objects.filter(user_id=user.id).update( user_is_active=user.is_active, user_email=user.email ) diff --git a/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py b/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py index 6d41893f98d7a6..b576527a938cb8 100644 --- a/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py +++ b/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py @@ -56,7 +56,7 @@ def setUp(self): ) -@control_silo_test() +@control_silo_test(stable=True) class GetSentryAppInstallationDetailsTest(SentryAppInstallationDetailsTest): def test_access_within_installs_organization(self): self.login_as(user=self.user) diff --git a/tests/sentry/deletions/test_sentry_app_installations.py b/tests/sentry/deletions/test_sentry_app_installations.py index 47893107bbfab3..5a4465cbadf8f3 100644 --- a/tests/sentry/deletions/test_sentry_app_installations.py +++ b/tests/sentry/deletions/test_sentry_app_installations.py @@ -16,7 +16,7 @@ from sentry.testutils.outbox import outbox_runner -class TestSentryAppIntallationDeletionTask(TestCase): +class TestSentryAppInstallationDeletionTask(TestCase): def setUp(self): self.user = self.create_user() self.org = self.create_organization() From 9b8aac1f3b28bdfed88c76cfa072d74ac1233f45 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 17 Jul 2023 22:01:51 +0000 
Subject: [PATCH 52/67] release: 23.7.0 --- CHANGES | 50 ++++++++++++++++++++++++++++++++++++++++++++++++++ LICENSE | 2 +- setup.cfg | 2 +- 3 files changed, 52 insertions(+), 2 deletions(-) diff --git a/CHANGES b/CHANGES index 508fe2a404d65f..d4a30585876d64 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,53 @@ +23.7.0 +------ + +### Escalating Issues V2 (EA) (ongoing) + +This is a placeholder milestone for tracking post-v1 work for escalating issues. +See the RFC: https://github.com/getsentry/rfcs/blob/main/text/0078-escalating-issues.md +There's also discussions about escalation in here: +https://github.com/getsentry/sentry/discussions/43039 + +V2 will focus on moving ongoing issues (upcoming) into the state archived-until-escalating after an issue being in the `ongoing` state for 14 days. + + +By: @snigdhas (#52360) + +### Search Shortcuts (ongoing) + +Make common searches more accessible and discoverable for users. [notion page](https://www.notion.so/sentry/Search-Shortcuts-fka-Assignee-Filter-on-Issue-Stream-648954e782d04805bc64f1983e5cbd16?pvs=4) + +By: @scttcper (#52414, #52348) + +### Various fixes & improvements + +- test(hybrid-cloud): Stabilizes sentry app install tests (#53004) by @GabeVillalobos +- ref(saved search): Migrate betterPriority to priority (#52909) by @ceorourke +- ref(sort): Change betterPriority to priority (#52910) by @ceorourke +- feat(crons): Use timeout_at for timed out check-ins (#52570) by @rjo100 +- ref(event_manager): Fix typing issues for event_manager (#52974) by @armenzg +- test(backup): Add more model tests (#52923) by @azaslavsky +- fix(replays): fix bin/mock-replays script (#52927) by @michellewzhang +- ref(crons): Change to uuid validator for trace (#52561) by @rjo100 +- fix(crons): Properly checks for environment name lengths (#52820) by @rjo100 +- feat(grouping): Add metrics for issue merging and unmerging (#52919) by @lobsterkatie +- chore(starfish): Adding referrers to starfish (#52749) by @wmak +- chore(starfish): Add 
starfish referrers to the backend (#52754) by @wmak +- ref(tsc): usageChart utils to tsx (#52935) by @IanWoodard +- chore(hybrid-cloud): Add tags to debug Identity service (#52997) by @dashed +- feat(starfish): Add unit and types for rates (#52991) by @wmak +- ref(replay): Refactor the "Next Breadcrumb" button to use *Frame types (#52931) by @ryan953 +- fix(group-attributes): log metric when post_save.send(update_fields=["status", "subs"]) is called for group (#52996) by @barkbarkimashark +- fix(releases): Use query parameter when switching filters (#52937) by @scttcper +- fix(discord): Change log level from error to info for unauthorized interactions (#52946) by @spalmurray +- ref: fix typing for sentry.models.__init__ (#52992) by @asottile-sentry +- feat(replay): Render the replay currentTime & hoverTime inside the performance waterfall (#52890) by @ryan953 +- feat(perf): Hide timing metrics on http (#52993) by @k-fish +- fix(issue-details): Fix react warning (#52907) by @roggenkemper +- fix(hybrid-cloud): Fixes tests for split DB mode app installs using transaction routing and silo annotations (#52932) by @GabeVillalobos + +_Plus 434 more_ + 23.6.2 ------ diff --git a/LICENSE b/LICENSE index 6af4b0d5225586..9ab1af241a96bb 100644 --- a/LICENSE +++ b/LICENSE @@ -16,7 +16,7 @@ Additional Use Grant: You may make use of the Licensed Work, provided that you d error-reporting or application monitoring features of the Licensed Work. -Change Date: 2026-07-06 +Change Date: 2026-07-17 Change License: Apache License, Version 2.0 diff --git a/setup.cfg b/setup.cfg index 0cf976a2c5fe4f..e7126f501d9167 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = sentry -version = 23.7.0.dev0 +version = 23.7.0 description = A realtime logging and aggregation server. 
long_description = file: README.md long_description_content_type = text/markdown From ecc36c67e00ccf0f35e494fd5de35f92ae5d8105 Mon Sep 17 00:00:00 2001 From: Alex Zaslavsky Date: Mon, 17 Jul 2023 15:08:22 -0700 Subject: [PATCH 53/67] test(backup): Add final model tests (#52924) This change adds all of the remaining model tests, save those added in PR #52778. Issue: getsentry/team-ospo#156 --- tests/sentry/backup/test_models.py | 176 ++++++++++++++++++++++++++++- 1 file changed, 172 insertions(+), 4 deletions(-) diff --git a/tests/sentry/backup/test_models.py b/tests/sentry/backup/test_models.py index 18f2fa21a1f13e..3cf636332db5a5 100644 --- a/tests/sentry/backup/test_models.py +++ b/tests/sentry/backup/test_models.py @@ -1,13 +1,15 @@ from __future__ import annotations import tempfile -from datetime import datetime +from datetime import datetime, timedelta from pathlib import Path from typing import Type +from uuid import uuid4 from click.testing import CliRunner from django.core.management import call_command from django.utils import timezone +from sentry_relay.auth import generate_key_pair from sentry.incidents.models import ( AlertRule, @@ -16,14 +18,25 @@ AlertRuleTrigger, AlertRuleTriggerAction, AlertRuleTriggerExclusion, + PendingIncidentSnapshot, + TimeSeriesSnapshot, ) +from sentry.models.actor import ACTOR_TYPES, Actor +from sentry.models.counter import Counter from sentry.models.dashboard import Dashboard, DashboardTombstone from sentry.models.dashboard_widget import ( DashboardWidget, DashboardWidgetQuery, DashboardWidgetTypes, ) +from sentry.models.email import Email from sentry.models.environment import Environment, EnvironmentProject +from sentry.models.integrations.sentry_app import SentryApp +from sentry.models.integrations.sentry_app_component import SentryAppComponent +from sentry.models.integrations.sentry_app_installation import SentryAppInstallation +from sentry.models.notificationaction import NotificationAction, NotificationActionProject 
+from sentry.models.options.option import ControlOption, Option +from sentry.models.options.organization_option import OrganizationOption from sentry.models.options.project_option import ProjectOption from sentry.models.options.user_option import UserOption from sentry.models.organization import Organization @@ -31,20 +44,38 @@ from sentry.models.organizationmapping import OrganizationMapping from sentry.models.organizationmember import OrganizationMember from sentry.models.organizationmemberteam import OrganizationMemberTeam +from sentry.models.orgauthtoken import OrgAuthToken from sentry.models.project import Project from sentry.models.projectbookmark import ProjectBookmark from sentry.models.projectkey import ProjectKey from sentry.models.projectownership import ProjectOwnership from sentry.models.projectredirect import ProjectRedirect from sentry.models.projectteam import ProjectTeam +from sentry.models.recentsearch import RecentSearch +from sentry.models.relay import Relay, RelayUsage +from sentry.models.repository import Repository +from sentry.models.rule import Rule, RuleActivity, RuleActivityType +from sentry.models.rulesnooze import RuleSnooze +from sentry.models.savedsearch import SavedSearch, Visibility +from sentry.models.search_common import SearchType +from sentry.models.servicehook import ServiceHook from sentry.models.team import Team from sentry.models.user import User from sentry.models.useremail import UserEmail from sentry.models.userip import UserIP from sentry.models.userpermission import UserPermission from sentry.models.userrole import UserRole, UserRoleUser -from sentry.monitors.models import Monitor, MonitorEnvironment, MonitorType, ScheduleType +from sentry.monitors.models import ( + CheckInStatus, + Monitor, + MonitorCheckIn, + MonitorEnvironment, + MonitorLocation, + MonitorType, + ScheduleType, +) from sentry.runner.commands.backup import import_, validate +from sentry.sentry_apps.apps import SentryAppUpdater from sentry.silo import 
unguarded_write from sentry.snuba.models import QuerySubscription, SnubaQuery, SnubaQueryEventType from sentry.testutils import TransactionTestCase @@ -161,6 +192,17 @@ def test_alert_rule_trigger(self): self.create_alert_rule_trigger_action(alert_rule_trigger=trigger) return self.import_export_then_validate() + @targets_models(ControlOption) + def test_control_option(self): + ControlOption.objects.create(key="foo", value="bar") + return self.import_export_then_validate() + + @targets_models(Counter) + def test_counter(self): + project = self.create_project() + Counter.increment(project, 1) + return self.import_export_then_validate() + @targets_models(Dashboard) def test_dashboard(self): self.create_dashboard() @@ -184,6 +226,11 @@ def test_dashboard_widget(self): DashboardWidgetQuery.objects.create(widget=widget, order=1, name="Test Query") return self.import_export_then_validate() + @targets_models(Email) + def test_email(self): + Email.objects.create(email="email@example.com") + return self.import_export_then_validate() + @targets_models(Environment) def test_environment(self): self.create_environment() @@ -201,14 +248,46 @@ def test_monitor(self): self.create_monitor() return self.import_export_then_validate() - @targets_models(MonitorEnvironment) + @targets_models(MonitorEnvironment, MonitorLocation) def test_monitor_environment(self): monitor = self.create_monitor() env = Environment.objects.create(organization_id=monitor.organization_id, name="test_env") - MonitorEnvironment.objects.create( + mon_env = MonitorEnvironment.objects.create( monitor=monitor, environment=env, ) + location = MonitorLocation.objects.create(guid=uuid4(), name="test_location") + MonitorCheckIn.objects.create( + monitor=monitor, + monitor_environment=mon_env, + location=location, + project_id=monitor.project_id, + status=CheckInStatus.IN_PROGRESS, + ) + return self.import_export_then_validate() + + @targets_models(NotificationAction, NotificationActionProject) + def 
test_notification_action(self): + self.create_notification_action(organization=self.organization, projects=[self.project]) + return self.import_export_then_validate() + + @targets_models(Option) + def test_option(self): + Option.objects.create(key="foo", value="bar") + return self.import_export_then_validate() + + @targets_models(OrgAuthToken) + def test_org_auth_token(self): + user = self.create_user() + org = self.create_organization(owner=user) + OrgAuthToken.objects.create( + organization_id=org.id, + name="token 1", + token_hashed="ABCDEF", + token_last_characters="xyz1", + scope_list=["org:ci"], + date_last_used=None, + ) return self.import_export_then_validate() @targets_models(Organization, OrganizationMapping) @@ -228,6 +307,14 @@ def test_organization_membership(self): OrganizationAccessRequest.objects.create(member=member, team=team) return self.import_export_then_validate() + @targets_models(OrganizationOption) + def test_organization_option(self): + organization = self.create_organization(name="test_org", owner=self.user) + OrganizationOption.objects.create( + organization=organization, key="sentry:account-rate-limit", value=0 + ) + return self.import_export_then_validate() + @targets_models(Project, ProjectKey, ProjectOption, ProjectTeam) def test_project(self): self.create_project() @@ -260,6 +347,87 @@ def test_project_redirect(self): ProjectRedirect.record(project, "old_slug") return self.import_export_then_validate() + @targets_models(Relay, RelayUsage) + def test_relay(self): + _, public_key = generate_key_pair() + relay_id = str(uuid4()) + Relay.objects.create(relay_id=relay_id, public_key=str(public_key), is_internal=True) + RelayUsage.objects.create(relay_id=relay_id, version="0.0.1", public_key=public_key) + return self.import_export_then_validate() + + @targets_models(Repository) + def test_repository(self): + Repository.objects.create( + name="test_repo", + organization_id=self.organization.id, + integration_id=self.integration.id, + ) + 
return self.import_export_then_validate() + + @targets_models(Rule, RuleActivity, RuleSnooze) + def test_rule(self): + rule = self.create_project_rule(project=self.project) + RuleActivity.objects.create(rule=rule, type=RuleActivityType.CREATED.value) + self.snooze_rule(user_id=self.user.id, owner_id=self.user.id, rule=rule) + return self.import_export_then_validate() + + @targets_models(RecentSearch, SavedSearch) + def test_search(self): + RecentSearch.objects.create( + organization=self.organization, + user_id=self.user.id, + type=SearchType.ISSUE.value, + query="some query", + ) + SavedSearch.objects.create( + organization=self.organization, + name="Saved query", + query="saved query", + visibility=Visibility.ORGANIZATION, + ) + return self.import_export_then_validate() + + @targets_models(SentryApp, SentryAppComponent, SentryAppInstallation) + def test_sentry_app(self): + app = self.create_sentry_app(name="test_app", organization=self.organization) + self.create_sentry_app_installation( + slug=app.slug, organization=self.organization, user=self.user + ) + updater = SentryAppUpdater(sentry_app=app) + updater.schema = {"elements": [self.create_alert_rule_action_schema()]} + updater.run(self.user) + return self.import_export_then_validate() + + @targets_models(PendingIncidentSnapshot, TimeSeriesSnapshot) + def test_snapshot(self): + incident = self.create_incident() + PendingIncidentSnapshot.objects.create( + incident=incident, target_run_date=datetime.utcnow() + timedelta(hours=4) + ) + TimeSeriesSnapshot.objects.create( + start=datetime.utcnow() - timedelta(hours=24), + end=datetime.utcnow(), + values=[[1.0, 2.0, 3.0], [1.5, 2.5, 3.5]], + period=1, + ) + return self.import_export_then_validate() + + @targets_models(ServiceHook) + def test_service_hook(self): + app = self.create_sentry_app() + actor = Actor.objects.create(type=ACTOR_TYPES["team"]) + install = self.create_sentry_app_installation(organization=self.organization, slug=app.slug) + 
ServiceHook.objects.create( + application_id=app.id, + actor_id=actor.id, + project_id=self.project.id, + organization_id=self.organization.id, + events=[], + installation_id=install.id, + url="https://example.com", + ) + return self.import_export_then_validate() + @targets_models(User, UserEmail, UserOption, UserPermission) def test_user(self): user = self.create_user() From 91ef3f00c4aa58643ab933f03511674e8b47b509 Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Mon, 17 Jul 2023 15:14:21 -0700 Subject: [PATCH 54/67] ref(sort): Replace betterPriority with priority (#52915) Now that the betterPriority sort is out and has replaced the old priority, just rename everything to priority for simplicity. Step 1: FE PR: https://github.com/getsentry/sentry/pull/52910 Step 2: Migration: https://github.com/getsentry/sentry/pull/52909 Step 3: This PR! --- src/sentry/constants.py | 1 - src/sentry/models/savedsearch.py | 2 - src/sentry/search/snuba/executors.py | 37 ++++++------- .../test_organization_group_index.py | 4 +- tests/snuba/search/test_backend.py | 52 +++++++++---------- 5 files changed, 44 insertions(+), 52 deletions(-) diff --git a/src/sentry/constants.py b/src/sentry/constants.py index 3e5492f470f1f6..53376c468aa804 100644 --- a/src/sentry/constants.py +++ b/src/sentry/constants.py @@ -46,7 +46,6 @@ def get_all_languages() -> List[str]: "date": _("Last Seen"), "new": _("First Seen"), "freq": _("Frequency"), - "better_priority": _("Better Priority"), } SEARCH_SORT_OPTIONS = { diff --git a/src/sentry/models/savedsearch.py b/src/sentry/models/savedsearch.py index b9208e10936a56..b6d90aeb4639f5 100644 --- a/src/sentry/models/savedsearch.py +++ b/src/sentry/models/savedsearch.py @@ -18,7 +18,6 @@ class SortOptions: FREQ = "freq" USER = "user" INBOX = "inbox" - BETTER_PRIORITY = "betterPriority" @classmethod def as_choices(cls): @@ -29,7 +28,6 @@ def as_choices(cls): (cls.FREQ, _("Events")), (cls.USER, _("Users")), (cls.INBOX, _("Date Added")), - 
(cls.BETTER_PRIORITY, _("Better Priority")), ) diff --git a/src/sentry/search/snuba/executors.py b/src/sentry/search/snuba/executors.py index 78bac15f0cf12e..3319dfbba42631 100644 --- a/src/sentry/search/snuba/executors.py +++ b/src/sentry/search/snuba/executors.py @@ -76,7 +76,7 @@ class PrioritySortWeights(TypedDict): @dataclass -class BetterPriorityParams: +class PriorityParams: # (event or issue age_hours) / (event or issue halflife hours) # any event or issue age that is greater than max_pow times the half-life hours will get clipped max_pow: int @@ -238,7 +238,7 @@ def _prepare_aggregations( end: datetime, having: Sequence[Sequence[Any]], aggregate_kwargs: Optional[PrioritySortWeights] = None, - replace_better_priority_aggregation: Optional[bool] = False, + replace_priority_aggregation: Optional[bool] = False, ) -> list[Any]: extra_aggregations = self.dependency_aggregations.get(sort_field, []) required_aggregations = set([sort_field, "total"] + extra_aggregations) @@ -249,8 +249,8 @@ def _prepare_aggregations( aggregations = [] for alias in required_aggregations: aggregation = self.aggregation_defs[alias] - if replace_better_priority_aggregation and alias in ["priority", "better_priority"]: - aggregation = self.aggregation_defs["better_priority_issue_platform"] + if replace_priority_aggregation and alias == "priority": + aggregation = self.aggregation_defs["priority_issue_platform"] if callable(aggregation): if aggregate_kwargs: aggregation = aggregation(start, end, aggregate_kwargs.get(alias, {})) @@ -302,10 +302,7 @@ def _prepare_params_for_category( else: conditions.append(converted_filter) - if ( - sort_field in ["priority", "better_priority"] - and group_category is not GroupCategory.ERROR.value - ): + if sort_field == "priority" and group_category is not GroupCategory.ERROR.value: aggregations = self._prepare_aggregations( sort_field, start, end, having, aggregate_kwargs, True ) @@ -503,13 +500,13 @@ def has_sort_strategy(self, sort_by: str) -> bool: 
return sort_by in self.sort_strategies.keys() -def better_priority_aggregation( +def priority_aggregation( start: datetime, end: datetime, aggregate_kwargs: PrioritySortWeights, ) -> Sequence[str]: - return better_priority_aggregation_impl( - BetterPriorityParams( + return priority_aggregation_impl( + PriorityParams( max_pow=16, min_score=0.01, event_age_weight=1, @@ -529,13 +526,13 @@ def better_priority_aggregation( ) -def better_priority_issue_platform_aggregation( +def priority_issue_platform_aggregation( start: datetime, end: datetime, aggregate_kwargs: PrioritySortWeights, ) -> Sequence[str]: - return better_priority_aggregation_impl( - BetterPriorityParams( + return priority_aggregation_impl( + PriorityParams( max_pow=16, min_score=0.01, event_age_weight=1, @@ -555,8 +552,8 @@ def better_priority_issue_platform_aggregation( ) -def better_priority_aggregation_impl( - params: BetterPriorityParams, +def priority_aggregation_impl( + params: PriorityParams, timestamp_column: str, use_stacktrace: bool, start: datetime, @@ -695,24 +692,22 @@ class PostgresSnubaQueryExecutor(AbstractQueryExecutor): "date": "last_seen", "freq": "times_seen", "new": "first_seen", - "priority": "better_priority", + "priority": "priority", "user": "user_count", # We don't need a corresponding snuba field here, since this sort only happens # in Postgres "inbox": "", - "betterPriority": "better_priority", } aggregation_defs = { "times_seen": ["count()", ""], "first_seen": ["multiply(toUInt64(min(timestamp)), 1000)", ""], "last_seen": ["multiply(toUInt64(max(timestamp)), 1000)", ""], - "priority": better_priority_aggregation, + "priority": priority_aggregation, # Only makes sense with WITH TOTALS, returns 1 for an individual group. 
"total": ["uniq", ISSUE_FIELD_NAME], "user_count": ["uniq", "tags[sentry:user]"], - "better_priority": better_priority_aggregation, - "better_priority_issue_platform": better_priority_issue_platform_aggregation, + "priority_issue_platform": priority_issue_platform_aggregation, } @property diff --git a/tests/snuba/api/endpoints/test_organization_group_index.py b/tests/snuba/api/endpoints/test_organization_group_index.py index 5dce9d0763f586..9e6cc816d91a50 100644 --- a/tests/snuba/api/endpoints/test_organization_group_index.py +++ b/tests/snuba/api/endpoints/test_organization_group_index.py @@ -114,7 +114,7 @@ def test_query_for_archived(self): assert len(response.data) == 1 assert response.data[0]["id"] == str(group.id) - def test_sort_by_better_priority(self): + def test_sort_by_priority(self): group = self.store_event( data={ "timestamp": iso_format(before_now(seconds=10)), @@ -164,7 +164,7 @@ def test_sort_by_better_priority(self): } response = self.get_success_response( - sort="betterPriority", + sort="priority", query="is:unresolved", limit=25, start=iso_format(before_now(days=1)), diff --git a/tests/snuba/search/test_backend.py b/tests/snuba/search/test_backend.py index 8a3777aab62926..b12e379cb98339 100644 --- a/tests/snuba/search/test_backend.py +++ b/tests/snuba/search/test_backend.py @@ -79,7 +79,7 @@ def make_query( if limit is not None: kwargs["limit"] = limit if aggregate_kwargs: - kwargs["aggregate_kwargs"] = {"better_priority": {**aggregate_kwargs}} + kwargs["aggregate_kwargs"] = {"priority": {**aggregate_kwargs}} return self.backend.query( projects, @@ -364,7 +364,7 @@ def test_sort(self): results = self.make_query(sort_by="user") assert list(results) == [self.group1, self.group2] - def test_better_priority_sort(self): + def test_priority_sort(self): weights: PrioritySortWeights = { "log_level": 5, "has_stacktrace": 5, @@ -375,7 +375,7 @@ def test_better_priority_sort(self): "norm": False, } results = self.make_query( - sort_by="betterPriority", + 
sort_by="priority", aggregate_kwargs=weights, ) assert list(results) == [self.group2, self.group1] @@ -2597,12 +2597,12 @@ def test_error_main_thread_no_results(self): assert len(results) == 0 -class EventsBetterPriorityTest(SharedSnubaTest, OccurrenceTestMixin): +class EventsPriorityTest(SharedSnubaTest, OccurrenceTestMixin): @property def backend(self): return EventsDatasetSnubaSearchBackend() - def test_better_priority_sort_old_and_new_events(self): + def test_priority_sort_old_and_new_events(self): """Test that an issue with only one old event is ranked lower than an issue with only one new event""" new_project = self.create_project(organization=self.project.organization) base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc) @@ -2644,7 +2644,7 @@ def test_better_priority_sort_old_and_new_events(self): "norm": False, } results = self.make_query( - sort_by="betterPriority", + sort_by="priority", projects=[new_project], aggregate_kwargs=weights, ) @@ -2652,7 +2652,7 @@ def test_better_priority_sort_old_and_new_events(self): old_group = Group.objects.get(id=old_event.group.id) assert list(results) == [recent_group, old_group] - def test_better_priority_sort_v2(self): + def test_priority_sort_v2(self): """Test that the v2 formula works.""" new_project = self.create_project(organization=self.project.organization) base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc) @@ -2694,7 +2694,7 @@ def test_better_priority_sort_v2(self): "norm": False, } results = self.make_query( - sort_by="betterPriority", + sort_by="priority", projects=[new_project], aggregate_kwargs=weights, ) @@ -2702,7 +2702,7 @@ def test_better_priority_sort_v2(self): old_group = Group.objects.get(id=old_event.group.id) assert list(results) == [recent_group, old_group] - def test_better_priority_log_level_results(self): + def test_priority_log_level_results(self): """Test that the scoring results change when we pass in different log level weights""" 
base_datetime = (datetime.utcnow() - timedelta(hours=1)).replace(tzinfo=pytz.utc) event1 = self.store_event( @@ -2733,7 +2733,7 @@ def test_better_priority_log_level_results(self): group2 = Group.objects.get(id=event2.group.id) agg_kwargs = { - "better_priority": { + "priority": { "log_level": 0, "has_stacktrace": 0, "relative_volume": 1, @@ -2749,7 +2749,7 @@ def test_better_priority_log_level_results(self): end=None, project_ids=[self.project.id], environment_ids=[], - sort_field="better_priority", + sort_field="priority", organization=self.organization, group_ids=[group1.id, group2.id], limit=150, @@ -2760,14 +2760,14 @@ def test_better_priority_log_level_results(self): # initially group 2's score is higher since it has a more recent event assert group2_score_before > group1_score_before - agg_kwargs["better_priority"].update({"log_level": 5}) + agg_kwargs["priority"].update({"log_level": 5}) results2 = query_executor.snuba_search( start=None, end=None, project_ids=[self.project.id], environment_ids=[], - sort_field="better_priority", + sort_field="priority", organization=self.organization, group_ids=[group1.id, group2.id], limit=150, @@ -2778,11 +2778,11 @@ def test_better_priority_log_level_results(self): # ensure fatal has a higher score than error assert group1_score_after > group2_score_after - def test_better_priority_has_stacktrace_results(self): + def test_priority_has_stacktrace_results(self): """Test that the scoring results change when we pass in different has_stacktrace weights""" base_datetime = (datetime.utcnow() - timedelta(hours=1)).replace(tzinfo=pytz.utc) agg_kwargs = { - "better_priority": { + "priority": { "log_level": 0, "has_stacktrace": 0, "relative_volume": 1, @@ -2833,7 +2833,7 @@ def test_better_priority_has_stacktrace_results(self): end=None, project_ids=[self.project.id], environment_ids=[], - sort_field="better_priority", + sort_field="priority", organization=self.organization, group_ids=[group1.id, group2.id], limit=150, @@ -2843,13 
+2843,13 @@ def test_better_priority_has_stacktrace_results(self): group2_score = results[1][1] assert group1_score == group2_score - agg_kwargs["better_priority"].update({"has_stacktrace": 3}) + agg_kwargs["priority"].update({"has_stacktrace": 3}) results = query_executor.snuba_search( start=None, end=None, project_ids=[self.project.id], environment_ids=[], - sort_field="better_priority", + sort_field="priority", organization=self.organization, group_ids=[group1.id, group2.id], limit=150, @@ -2860,7 +2860,7 @@ def test_better_priority_has_stacktrace_results(self): # check that a group with an event with a stacktrace has a higher weight than one without assert group1_score < group2_score - def test_better_priority_event_halflife_results(self): + def test_priority_event_halflife_results(self): """Test that the scoring results change when we pass in different event halflife weights""" base_datetime = (datetime.utcnow() - timedelta(hours=1)).replace(tzinfo=pytz.utc) event1 = self.store_event( @@ -2891,7 +2891,7 @@ def test_better_priority_event_halflife_results(self): group2 = Group.objects.get(id=event2.group.id) agg_kwargs = { - "better_priority": { + "priority": { "log_level": 0, "has_stacktrace": 0, "relative_volume": 1, @@ -2907,7 +2907,7 @@ def test_better_priority_event_halflife_results(self): end=None, project_ids=[self.project.id], environment_ids=[], - sort_field="better_priority", + sort_field="priority", organization=self.organization, group_ids=[group1.id, group2.id], limit=150, @@ -2918,13 +2918,13 @@ def test_better_priority_event_halflife_results(self): # initially group 2's score is higher since it has a more recent event assert group2_score_before > group1_score_before - agg_kwargs["better_priority"].update({"event_halflife_hours": 2}) + agg_kwargs["priority"].update({"event_halflife_hours": 2}) results = query_executor.snuba_search( start=None, end=None, project_ids=[self.project.id], environment_ids=[], - sort_field="better_priority", + 
sort_field="priority", organization=self.organization, group_ids=[group1.id, group2.id], limit=150, @@ -2934,7 +2934,7 @@ def test_better_priority_event_halflife_results(self): group2_score_after = results[1][1] assert group1_score_after < group2_score_after - def test_better_priority_mixed_group_types(self): + def test_priority_mixed_group_types(self): base_datetime = (datetime.utcnow() - timedelta(hours=1)).replace(tzinfo=pytz.utc) error_event = self.store_event( @@ -2967,7 +2967,7 @@ def test_better_priority_mixed_group_types(self): profile_group_1 = group_info.group agg_kwargs = { - "better_priority": { + "priority": { "log_level": 0, "has_stacktrace": 0, "relative_volume": 1, @@ -2989,7 +2989,7 @@ def test_better_priority_mixed_group_types(self): end=None, project_ids=[self.project.id], environment_ids=[], - sort_field="better_priority", + sort_field="priority", organization=self.organization, group_ids=[profile_group_1.id, error_group.id], limit=150, From 8bb44313ba3b8ac35e6328dcfea444f42365a080 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 17 Jul 2023 15:23:25 -0700 Subject: [PATCH 55/67] fix(vscode): Add black formatter recommendation (#53016) --- .vscode/extensions.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 86145c3e760d59..689d4539d2a825 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -4,9 +4,11 @@ "recommendations": [ "esbenp.prettier-vscode", "ms-python.python", + "ms-python.black-formatter", "dbaeumer.vscode-eslint", "lextudio.restructuredtext", "ziyasal.vscode-open-in-github", - "timonwong.shellcheck" + "timonwong.shellcheck", + "tyriar.sort-lines", ] } From ae2952796947eaa0404407060616b11bd0217252 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 17 Jul 2023 15:34:26 -0700 Subject: [PATCH 56/67] fix(escalating): Add resolved status badge to weekly-report (#53011) --- src/sentry/tasks/weekly_reports.py | 2 ++ 1 file changed, 2 
insertions(+) diff --git a/src/sentry/tasks/weekly_reports.py b/src/sentry/tasks/weekly_reports.py index 3abac0be4de9bb..743af6328b4a17 100644 --- a/src/sentry/tasks/weekly_reports.py +++ b/src/sentry/tasks/weekly_reports.py @@ -696,6 +696,8 @@ def get_group_status_badge(group: Group) -> Tuple[str, str, str]: Returns a tuple of (text, background_color, border_color) Should be similar to GroupStatusBadge.tsx in the frontend """ + if group.status == GroupStatus.RESOLVED: + return ("Resolved", "rgba(108, 95, 199, 0.08)", "rgba(108, 95, 199, 0.5)") if group.status == GroupStatus.UNRESOLVED: if group.substatus == GroupSubStatus.NEW: return ("New", "rgba(245, 176, 0, 0.08)", "rgba(245, 176, 0, 0.55)") From 3f12c0eb2d838b839292c8783033c2af9b0e1427 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 17 Jul 2023 22:59:04 +0000 Subject: [PATCH 57/67] meta: Bump new development version --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index e7126f501d9167..fbfa9644fa224d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = sentry -version = 23.7.0 +version = 23.8.0.dev0 description = A realtime logging and aggregation server. long_description = file: README.md long_description_content_type = text/markdown From 411cea3595907427c0ac5a69c8ee1f57ee53bdd3 Mon Sep 17 00:00:00 2001 From: Eric Hasegawa Date: Mon, 17 Jul 2023 16:09:00 -0700 Subject: [PATCH 58/67] style(auth): Improve spacing on OAuth connect (#52947) Takes the OAuth connect application page from looking like this: Screenshot 2023-07-15 at 3 37 14 PM To looking like this: Screenshot 2023-07-17 at 11 47 31 AM Here the client application is World0. I think this looks fine? If we want an improved design lmk. 
--- src/sentry/templates/sentry/login.html | 2 +- static/less/misc.less | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/sentry/templates/sentry/login.html b/src/sentry/templates/sentry/login.html index 4c6daf25914613..b47eae1faf8596 100644 --- a/src/sentry/templates/sentry/login.html +++ b/src/sentry/templates/sentry/login.html @@ -7,7 +7,7 @@ {% block auth_container %} {% if banner %} -

{{ banner }}

+

{{ banner }}

{% endif %}
diff --git a/static/less/misc.less b/static/less/misc.less index 41873cc30a2d5e..597bfed3ee0f12 100644 --- a/static/less/misc.less +++ b/static/less/misc.less @@ -68,6 +68,9 @@ .m-b-2 { margin-bottom: (@spacer-y * 1.5) !important; } +.m-l-1 { + margin-left: @spacer-x !important; +} // Padding From f52dfb5a2bf781f806eb8cf2be616a7de9c19f21 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 17 Jul 2023 16:49:39 -0700 Subject: [PATCH 59/67] ref(replays): make mock replay more interesting (#53021) The `bin/mock-replay` script now features a slightly more interesting replay recording, with some mock rage click frames. Screenshot 2023-07-17 at 4 02 11 PM For simplicity, I added small methods that created rage click and click segments rather than importing and reading a large JSON file with a bunch of frames, since I figured that a few hardcoded rage click events would be good enough for demo purposes :o) --- bin/mock-replay | 29 ++++++++++++++++++++- src/sentry/replays/testutils.py | 45 +++++++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+), 1 deletion(-) diff --git a/bin/mock-replay b/bin/mock-replay index 799d6fab91219d..06bffba5f4d54d 100755 --- a/bin/mock-replay +++ b/bin/mock-replay @@ -19,10 +19,12 @@ from sentry.replays.models import ReplayRecordingSegment from sentry.replays.testutils import ( mock_replay, mock_rrweb_div_helloworld, + mock_segment_click, mock_segment_console, mock_segment_fullsnapshot, mock_segment_init, mock_segment_nagivation, + mock_segment_rageclick, ) @@ -42,6 +44,31 @@ def create_recording(replay_id, project_id, timestamp): mock_segment_nagivation( timestamp + timedelta(seconds=2), hrefFrom="/home/", hrefTo="/profile/" ), + mock_segment_rageclick( + timestamp + timedelta(seconds=2), + "nav.app-65yvxw.e1upz5ks6[aria-label='Primary Navigation'] > div.app-1v175cc.e1upz5ks4", + "sidebar-item-performance", + "a", + 24, + ), + mock_segment_click( + timestamp + 
timedelta(seconds=2), + "nav.app-65yvxw.e1upz5ks6[aria-label='Primary Navigation'] > div.app-1v175cc.e1upz5ks4", + "sidebar-item-performance", + "a", + ), + mock_segment_nagivation( + timestamp + timedelta(seconds=6), + hrefFrom="/profile/", + hrefTo="/performance/", + ), + mock_segment_rageclick( + timestamp + timedelta(seconds=6), + "nav.app-65yvxw.e1upz5ks6[aria-label='Primary Navigation'] > div.app-1v175cc.e1upz5ks4", + "sidebar-item-performance", + "a", + 24, + ), ] for (segment_id, segment) in enumerate(segments): store_replay_segments(replay_id, project_id, segment_id, segment) @@ -97,7 +124,7 @@ def main(): project.add_team(team) replay_id = uuid.uuid4().hex - seq1_timestamp = datetime.now() - timedelta(seconds=22) + seq1_timestamp = datetime.now() - timedelta(seconds=15) seq2_timestamp = datetime.now() - timedelta(seconds=5) click.echo("Creating Replay events entries...") diff --git a/src/sentry/replays/testutils.py b/src/sentry/replays/testutils.py index c84aac6062bc82..87b65894c2a178 100644 --- a/src/sentry/replays/testutils.py +++ b/src/sentry/replays/testutils.py @@ -347,6 +347,51 @@ def mock_segment_nagivation( ) +def mock_segment_click( + timestamp: datetime.datetime, message: str, id: str, tagName: str +) -> SegmentList: + return mock_segment_breadcrumb( + timestamp, + { + "timestamp": sec(timestamp), + "type": "default", + "category": "ui.click", + "message": message, + "data": { + "node": { + "tagName": tagName, + "attributes": { + "id": id, + }, + } + }, + }, + ) + + +def mock_segment_rageclick( + timestamp: datetime.datetime, message: str, id: str, tagName: str, clickCount: int +) -> SegmentList: + return mock_segment_breadcrumb( + timestamp, + { + "timestamp": sec(timestamp), # sentry data inside rrweb is in seconds + "type": "default", + "category": "ui.multiClick", + "message": message, + "data": { + "node": { + "tagName": tagName, + "attributes": { + "id": id, + }, + }, + "clickCount": clickCount, + }, + }, + ) + + __rrweb_id = 0 From 
41ef06e5e3ef6df36c8e0e331638c7edb332d544 Mon Sep 17 00:00:00 2001 From: Gilbert Szeto Date: Mon, 17 Jul 2023 17:58:34 -0700 Subject: [PATCH 60/67] ref(search): clean-up assign-to-me feature flag usage (#52639) Cleaning up the `organizations:assign-to-me` feature flag since we've GAed the feature. --- static/app/utils/withIssueTags.spec.tsx | 8 +++++--- static/app/utils/withIssueTags.tsx | 4 +--- static/app/views/issueList/utils.spec.tsx | 11 +---------- static/app/views/issueList/utils.tsx | 6 ++---- 4 files changed, 9 insertions(+), 20 deletions(-) diff --git a/static/app/utils/withIssueTags.spec.tsx b/static/app/utils/withIssueTags.spec.tsx index 9bec6df9469797..3664d73d5192a4 100644 --- a/static/app/utils/withIssueTags.spec.tsx +++ b/static/app/utils/withIssueTags.spec.tsx @@ -64,7 +64,9 @@ describe('withIssueTags HoC', function () { ]); }); - expect(screen.getByText(/assigned: me, \[me, none\]/)).toBeInTheDocument(); + expect( + screen.getByText(/assigned: me, my_teams, \[me, my_teams, none\]/) + ).toBeInTheDocument(); act(() => { TeamStore.loadInitialData([ @@ -78,7 +80,7 @@ describe('withIssueTags HoC', function () { expect( screen.getByText( - /assigned: me, \[me, none\], #best-team-na, foo@example.com, joe@example.com/ + /assigned: me, my_teams, \[me, my_teams, none\], #best-team-na, foo@example.com, joe@example.com/ ) ).toBeInTheDocument(); @@ -111,7 +113,7 @@ describe('withIssueTags HoC', function () { }); expect(screen.getByTestId('Suggested Values')).toHaveTextContent( - 'me, [me, none], #best-team' + 'me, my_teams, [me, my_teams, none], #best-team' ); expect(screen.getByTestId('All Values')).toHaveTextContent( diff --git a/static/app/utils/withIssueTags.tsx b/static/app/utils/withIssueTags.tsx index 6748397b907ffa..f3c7b523fa642d 100644 --- a/static/app/utils/withIssueTags.tsx +++ b/static/app/utils/withIssueTags.tsx @@ -60,9 +60,7 @@ function withIssueTags( .filter(team => !team.isMember) .map(team => `#${team.slug}`); - const meAndMyTeams = 
props.organization.features.includes('assign-to-me') - ? ['my_teams', '[me, my_teams, none]'] - : ['[me, none]']; + const meAndMyTeams = ['my_teams', '[me, my_teams, none]']; const suggestedAssignees: string[] = ['me', ...meAndMyTeams, ...userTeams]; const assigndValues: SearchGroup[] | string[] = props.organization.features.includes('issue-search-shortcuts') diff --git a/static/app/views/issueList/utils.spec.tsx b/static/app/views/issueList/utils.spec.tsx index 5e56516c84a4ed..f403fb92fe371e 100644 --- a/static/app/views/issueList/utils.spec.tsx +++ b/static/app/views/issueList/utils.spec.tsx @@ -21,18 +21,9 @@ describe('getTabs', () => { }); it('should enable/disable my_teams filter in For Review tab', () => { - expect( - getTabs(TestStubs.Organization({features: ['assign-to-me']})).map(tab => tab[0]) - ).toEqual([ - 'is:unresolved', - 'is:unresolved is:for_review assigned_or_suggested:[me, my_teams, none]', - 'is:ignored', - '__custom__', - ]); - expect(getTabs(TestStubs.Organization({features: []})).map(tab => tab[0])).toEqual([ 'is:unresolved', - 'is:unresolved is:for_review assigned_or_suggested:[me, none]', + 'is:unresolved is:for_review assigned_or_suggested:[me, my_teams, none]', 'is:ignored', '__custom__', ]); diff --git a/static/app/views/issueList/utils.tsx b/static/app/views/issueList/utils.tsx index a56e35d22aa20d..7b6e39b01ce9b0 100644 --- a/static/app/views/issueList/utils.tsx +++ b/static/app/views/issueList/utils.tsx @@ -4,7 +4,6 @@ import {t, tct} from 'sentry/locale'; import {Organization} from 'sentry/types'; export enum Query { - FOR_REVIEW_OLD = 'is:unresolved is:for_review assigned_or_suggested:[me, none]', FOR_REVIEW = 'is:unresolved is:for_review assigned_or_suggested:[me, my_teams, none]', UNRESOLVED = 'is:unresolved', IGNORED = 'is:ignored', @@ -47,7 +46,6 @@ type OverviewTab = { */ export function getTabs(organization: Organization) { const hasEscalatingIssuesUi = organization.features.includes('escalating-issues'); - const 
hasAssignToMe = organization.features.includes('assign-to-me'); const tabs: Array<[string, OverviewTab]> = [ [ Query.UNRESOLVED, @@ -59,7 +57,7 @@ export function getTabs(organization: Organization) { }, ], [ - hasAssignToMe ? Query.FOR_REVIEW : Query.FOR_REVIEW_OLD, + Query.FOR_REVIEW, { name: t('For Review'), analyticsName: 'needs_review', @@ -219,7 +217,7 @@ export const DISCOVER_EXCLUSION_FIELDS: string[] = [ '__text', ]; -export const FOR_REVIEW_QUERIES: string[] = [Query.FOR_REVIEW, Query.FOR_REVIEW_OLD]; +export const FOR_REVIEW_QUERIES: string[] = [Query.FOR_REVIEW]; export const SAVED_SEARCHES_SIDEBAR_OPEN_LOCALSTORAGE_KEY = 'issue-stream-saved-searches-sidebar-open'; From c7c7855a5e502ee2cd9d06ccb5befc1ac2e98d96 Mon Sep 17 00:00:00 2001 From: Gilbert Szeto Date: Mon, 17 Jul 2023 17:58:42 -0700 Subject: [PATCH 61/67] ref(search): clean-up organizations:assign-to-me feature flag (#52306) Removes `organization:assign-to-me` feature flag and references which GA's the changes to `assign_to` and `assign_or_suggested` issue search filters to differentiate between `me` and `my_teams` values. Also cleaned up some tests that were created before the functional split change where the `me` filter value included the user's team(s). --- src/sentry/conf/server.py | 2 - src/sentry/features/__init__.py | 1 - src/sentry/search/snuba/backend.py | 30 +- .../api/serializers/test_organization.py | 1 - tests/sentry/api/test_issue_search.py | 7 +- tests/snuba/search/test_backend.py | 538 ++++++++---------- 6 files changed, 252 insertions(+), 327 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 2de1323bf20fd6..16cbbc897514c9 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1298,8 +1298,6 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "organizations:alert-allow-indexed": False, # Enables tagging javascript errors from the browser console. 
"organizations:javascript-console-error-tag": False, - # Enables separate filters for user and user's teams - "organizations:assign-to-me": True, # Enables the cron job to auto-enable codecov integrations. "organizations:auto-enable-codecov": False, # Enables automatically linking repositories using commit webhook data diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 1e949f4faed426..9ae15205f30ad1 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -63,7 +63,6 @@ # Organization scoped features that are in development or in customer trials. default_manager.add("organizations:javascript-console-error-tag", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:assign-to-me", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:alert-allow-indexed", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:alert-crash-free-metrics", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:alert-filters", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) diff --git a/src/sentry/search/snuba/backend.py b/src/sentry/search/snuba/backend.py index 87162ec8bd9a4a..28a091b4be978c 100644 --- a/src/sentry/search/snuba/backend.py +++ b/src/sentry/search/snuba/backend.py @@ -10,7 +10,7 @@ from django.utils import timezone from django.utils.functional import SimpleLazyObject -from sentry import features, quotas +from sentry import quotas from sentry.api.event_search import SearchFilter from sentry.exceptions import InvalidSearchQuery from sentry.issues.grouptype import ErrorGroupType @@ -25,8 +25,6 @@ GroupOwner, GroupStatus, GroupSubscription, - OrganizationMember, - OrganizationMemberTeam, PlatformExternalIssue, Project, Release, @@ -41,7 +39,6 @@ PostgresSnubaQueryExecutor, PrioritySortWeights, ) -from sentry.search.utils import get_teams_for_users from sentry.utils.cursors 
import Cursor, CursorResult @@ -79,17 +76,6 @@ def assigned_to_filter( ).values_list("group_id", flat=True) } ) - organization = projects[0].organization - # Only add teams to query if assign-to-me flag is off - if not features.has("organizations:assign-to-me", organization, actor=None): - query |= Q( - **{ - f"{field_filter}__in": GroupAssignee.objects.filter( - project_id__in=[p.id for p in projects], - team_id__in=[team for team in get_teams_for_users(projects, users)], - ).values_list("group_id", flat=True) - } - ) if include_none: query |= unassigned_filter(True, projects, field_filter=field_filter) @@ -218,21 +204,7 @@ def assigned_or_suggested_filter( if "User" in types_to_owners: users = types_to_owners["User"] user_ids: List[int] = [u.id for u in users if u is not None] - team_ids = list( - Team.objects.filter( - id__in=OrganizationMemberTeam.objects.filter( - organizationmember__in=OrganizationMember.objects.filter( - user_id__in=user_ids, organization_id=organization_id - ), - is_active=True, - ).values("team") - ).values_list("id", flat=True) - ) - organization = projects[0].organization query_ids = Q(user_id__in=user_ids) - # Only add team_ids to query if assign-to-me flag is off - if not features.has("organizations:assign-to-me", organization, actor=None): - query_ids = query_ids | Q(team_id__in=team_ids) owned_by_me = Q( **{ f"{field_filter}__in": GroupOwner.objects.filter( diff --git a/tests/sentry/api/serializers/test_organization.py b/tests/sentry/api/serializers/test_organization.py index 10ab2458b8a2d6..a0f00a2f4294a8 100644 --- a/tests/sentry/api/serializers/test_organization.py +++ b/tests/sentry/api/serializers/test_organization.py @@ -56,7 +56,6 @@ def test_simple(self): assert result["id"] == str(organization.id) assert result["features"] == { "advanced-search", - "assign-to-me", "change-alerts", "crash-rate-alerts", "custom-symbol-sources", diff --git a/tests/sentry/api/test_issue_search.py b/tests/sentry/api/test_issue_search.py index 
c2591298ac22ce..82da40f622e559 100644 --- a/tests/sentry/api/test_issue_search.py +++ b/tests/sentry/api/test_issue_search.py @@ -26,7 +26,7 @@ from sentry.models.group import GROUP_SUBSTATUS_TO_STATUS_MAP, STATUS_QUERY_CHOICES, GroupStatus from sentry.search.utils import get_teams_for_users from sentry.testutils import TestCase -from sentry.testutils.helpers.features import apply_feature_flag_on_cls, with_feature +from sentry.testutils.helpers.features import apply_feature_flag_on_cls from sentry.testutils.silo import region_silo_test from sentry.types.group import SUBSTATUS_UPDATE_CHOICES, GroupSubStatus @@ -181,7 +181,6 @@ def test_invalid(self): @region_silo_test(stable=True) class ConvertQueryValuesTest(TestCase): - @with_feature("organizations:assign-to-me") def test_valid_assign_me_converter(self): filters = [SearchFilter(SearchKey("assigned_to"), "=", SearchValue("me"))] expected = value_converters["assigned_to"]( @@ -190,14 +189,12 @@ def test_valid_assign_me_converter(self): filters = convert_query_values(filters, [self.project], self.user, None) assert filters[0].value.raw_value == expected - @with_feature("organizations:assign-to-me") def test_valid_assign_me_no_converter(self): search_val = SearchValue("me") filters = [SearchFilter(SearchKey("something"), "=", search_val)] filters = convert_query_values(filters, [self.project], self.user, None) assert filters[0].value.raw_value == search_val.raw_value - @with_feature("organizations:assign-to-me") def test_valid_assign_my_teams_converter(self): filters = [SearchFilter(SearchKey("assigned_to"), "=", SearchValue("my_teams"))] expected = value_converters["assigned_to"]( @@ -206,7 +203,6 @@ def test_valid_assign_my_teams_converter(self): filters = convert_query_values(filters, [self.project], self.user, None) assert filters[0].value.raw_value == expected - @with_feature("organizations:assign-to-me") def test_valid_assign_my_teams_no_converter(self): search_val = SearchValue("my_teams") filters = 
[SearchFilter(SearchKey("something"), "=", search_val)] @@ -342,7 +338,6 @@ def test_user(self): ["me"], [self.project], self.user, None ) == convert_user_value(["me"], [self.project], self.user, None) - @with_feature("organizations:assign-to-me") def test_my_team(self): assert convert_actor_or_none_value( ["my_teams"], [self.project], self.user, None diff --git a/tests/snuba/search/test_backend.py b/tests/snuba/search/test_backend.py index b12e379cb98339..b5a4240ba37bcb 100644 --- a/tests/snuba/search/test_backend.py +++ b/tests/snuba/search/test_backend.py @@ -1074,9 +1074,8 @@ def test_assigned_to(self): ga.update(team=self.team, user_id=None) assert GroupAssignee.objects.get(id=ga.id).user_id is None - with Feature({"organizations:assign-to-me": False}): - results = self.make_query(search_filter_query="assigned:%s" % self.user.username) - assert set(results) == {self.group2} + results = self.make_query(search_filter_query="assigned:%s" % self.user.username) + assert set(results) == set() # test when there should be no results other_user = self.create_user() @@ -1115,27 +1114,18 @@ def test_assigned_to_me_my_teams(self): user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project ) - with Feature({"organizations:assign-to-me": False}): - self.run_test_query( - "assigned:me", - [my_team_group, self.group2], - user=self.user, - ) - assert not GroupAssignee.objects.filter( - user_id=self.user.id, group=my_team_group - ).exists() - - with self.feature("organizations:assign-to-me"): - self.run_test_query( - "assigned:me", - [self.group2], - user=self.user, - ) - self.run_test_query( - "assigned:my_teams", - [my_team_group], - user=self.user, - ) + self.run_test_query( + "assigned:me", + [self.group2], + user=self.user, + ) + assert not GroupAssignee.objects.filter(user_id=self.user.id, group=my_team_group).exists() + + self.run_test_query( + "assigned:my_teams", + [my_team_group], + user=self.user, + ) def 
test_assigned_to_me_my_teams_in_syntax(self): my_team_group = self.store_event( @@ -1159,32 +1149,29 @@ def test_assigned_to_me_my_teams_in_syntax(self): GroupAssignee.objects.create( user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project ) - with Feature({"organizations:assign-to-me": False}): - self.run_test_query( - "assigned:[me]", - [my_team_group, self.group2], - user=self.user, - ) - assert not GroupAssignee.objects.filter( - user_id=self.user.id, group=my_team_group - ).exists() - - with self.feature("organizations:assign-to-me"): - self.run_test_query( - "assigned:[me]", - [self.group2], - user=self.user, - ) - self.run_test_query( - "assigned:[my_teams]", - [my_team_group], - user=self.user, - ) - self.run_test_query( - "assigned:[me, my_teams]", - [self.group2, my_team_group], - user=self.user, - ) + + self.run_test_query( + "assigned:[me]", + [self.group2], + user=self.user, + ) + assert not GroupAssignee.objects.filter(user_id=self.user.id, group=my_team_group).exists() + + self.run_test_query( + "assigned:[me]", + [self.group2], + user=self.user, + ) + self.run_test_query( + "assigned:[my_teams]", + [my_team_group], + user=self.user, + ) + self.run_test_query( + "assigned:[me, my_teams]", + [self.group2, my_team_group], + user=self.user, + ) def test_assigned_to_in_syntax(self): group_3 = self.store_event( @@ -1222,23 +1209,22 @@ def test_assigned_to_in_syntax(self): user_id=self.user.id, group=self.group2, project=self.group2.project ) ga_2.update(team=self.team, user_id=None) - with Feature({"organizations:assign-to-me": False}): - self.run_test_query( - f"assigned:[{self.user.username}, {other_user.username}]", - [self.group2, group_3], - [self.group1], - ) - self.run_test_query( - f"assigned:[#{self.team.slug}, {other_user.username}]", - [self.group2, group_3], - [self.group1], - ) + self.run_test_query( + f"assigned:[{self.user.username}, {other_user.username}]", + [group_3], + [self.group1, self.group2], + ) + 
self.run_test_query( + f"assigned:[#{self.team.slug}, {other_user.username}]", + [self.group2, group_3], + [self.group1], + ) - self.run_test_query( - f"assigned:[me, none, {other_user.username}]", - [self.group1, self.group2, group_3], - [], - ) + self.run_test_query( + f"assigned:[me, none, {other_user.username}]", + [self.group1, group_3], + [self.group2], + ) def test_assigned_or_suggested_in_syntax(self): Group.objects.all().delete() @@ -1434,20 +1420,18 @@ def test_assigned_or_suggested_my_teams(self): project_id=self.project.id, ).group - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - "assigned_or_suggested:me", - [], - [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:my_teams", - [], - [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group], - user=self.user, - ) + self.run_test_query( + "assigned_or_suggested:me", + [], + [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:my_teams", + [], + [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group], + user=self.user, + ) GroupOwner.objects.create( group=assigned_to_other_group, @@ -1469,20 +1453,18 @@ def test_assigned_or_suggested_my_teams(self): user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project ) - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - "assigned_or_suggested:me", - [group, assigned_to_other_group], - [group1, group2, assigned_group, my_team_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:my_teams", - [my_team_group], - [group, group1, group2, assigned_group, assigned_to_other_group], - user=self.user, - ) + self.run_test_query( + 
"assigned_or_suggested:me", + [group, assigned_to_other_group], + [group1, group2, assigned_group, my_team_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:my_teams", + [my_team_group], + [group, group1, group2, assigned_group, assigned_to_other_group], + user=self.user, + ) # Because assigned_to_other_event is assigned to self.other_user, it should not show up in assigned_or_suggested search for anyone but self.other_user. (aka. they are now the only owner) other_user = self.create_user("other@user.com", is_superuser=False) @@ -1492,39 +1474,35 @@ def test_assigned_or_suggested_my_teams(self): user_id=other_user.id, ) - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - "assigned_or_suggested:me", - [group], - [group1, group2, assigned_group, my_team_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:my_teams", - [my_team_group], - [group, group1, group2, assigned_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - f"assigned_or_suggested:{other_user.email}", - [assigned_to_other_group], - [group, group1, group2, assigned_group, my_team_group], - user=self.user, - ) + self.run_test_query( + "assigned_or_suggested:me", + [group], + [group1, group2, assigned_group, my_team_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:my_teams", + [my_team_group], + [group, group1, group2, assigned_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + f"assigned_or_suggested:{other_user.email}", + [assigned_to_other_group], + [group, group1, group2, assigned_group, my_team_group], + user=self.user, + ) GroupAssignee.objects.create( group=assigned_group, project=self.project, user_id=self.user.id ) - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - 
f"assigned_or_suggested:{self.user.email}", - [assigned_group, group], - [group1, group2, my_team_group, assigned_to_other_group], - user=self.user, - ) + self.run_test_query( + f"assigned_or_suggested:{self.user.email}", + [assigned_group, group], + [group1, group2, my_team_group, assigned_to_other_group], + user=self.user, + ) GroupOwner.objects.create( group=group, @@ -1535,14 +1513,12 @@ def test_assigned_or_suggested_my_teams(self): user_id=None, ) - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - f"assigned_or_suggested:#{self.team.slug}", - [group, my_team_group], - [group1, group2, assigned_group, assigned_to_other_group], - user=self.user, - ) + self.run_test_query( + f"assigned_or_suggested:#{self.team.slug}", + [group, my_team_group], + [group1, group2, assigned_group, assigned_to_other_group], + user=self.user, + ) def test_assigned_or_suggested_my_teams_in_syntax(self): Group.objects.all().delete() @@ -1598,26 +1574,24 @@ def test_assigned_or_suggested_my_teams_in_syntax(self): project_id=self.project.id, ).group - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - "assigned_or_suggested:[me]", - [], - [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[my_teams]", - [], - [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[me, my_teams]", - [], - [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group], - user=self.user, - ) + self.run_test_query( + "assigned_or_suggested:[me]", + [], + [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[my_teams]", + [], + [group, group1, group2, assigned_group, 
assigned_to_other_group, my_team_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[me, my_teams]", + [], + [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group], + user=self.user, + ) GroupOwner.objects.create( group=assigned_to_other_group, @@ -1639,26 +1613,24 @@ def test_assigned_or_suggested_my_teams_in_syntax(self): user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project ) - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - "assigned_or_suggested:[me]", - [group, assigned_to_other_group], - [group1, group2, assigned_group, my_team_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[my_teams]", - [my_team_group], - [group, group1, group2, assigned_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[me, my_teams]", - [group, assigned_to_other_group, my_team_group], - [group1, group2, assigned_group], - user=self.user, - ) + self.run_test_query( + "assigned_or_suggested:[me]", + [group, assigned_to_other_group], + [group1, group2, assigned_group, my_team_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[my_teams]", + [my_team_group], + [group, group1, group2, assigned_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[me, my_teams]", + [group, assigned_to_other_group, my_team_group], + [group1, group2, assigned_group], + user=self.user, + ) # Because assigned_to_other_event is assigned to self.other_user, it should not show up in assigned_or_suggested search for anyone but self.other_user. (aka. 
they are now the only owner) other_user = self.create_user("other@user.com", is_superuser=False) @@ -1668,45 +1640,41 @@ def test_assigned_or_suggested_my_teams_in_syntax(self): user_id=other_user.id, ) - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - "assigned_or_suggested:[me]", - [group], - [group1, group2, assigned_group, my_team_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[my_teams]", - [my_team_group], - [group, group1, group2, assigned_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[me, my_teams]", - [group, my_team_group], - [group1, group2, assigned_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - f"assigned_or_suggested:[{other_user.email}]", - [assigned_to_other_group], - [group, group1, group2, assigned_group, my_team_group], - user=self.user, - ) + self.run_test_query( + "assigned_or_suggested:[me]", + [group], + [group1, group2, assigned_group, my_team_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[my_teams]", + [my_team_group], + [group, group1, group2, assigned_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[me, my_teams]", + [group, my_team_group], + [group1, group2, assigned_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + f"assigned_or_suggested:[{other_user.email}]", + [assigned_to_other_group], + [group, group1, group2, assigned_group, my_team_group], + user=self.user, + ) GroupAssignee.objects.create( group=assigned_group, project=self.project, user_id=self.user.id ) - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - f"assigned_or_suggested:[{self.user.email}]", - [assigned_group, group], - [group1, group2, 
my_team_group, assigned_to_other_group], - user=self.user, - ) + self.run_test_query( + f"assigned_or_suggested:[{self.user.email}]", + [assigned_group, group], + [group1, group2, my_team_group, assigned_to_other_group], + user=self.user, + ) GroupOwner.objects.create( group=group, @@ -1717,32 +1685,30 @@ def test_assigned_or_suggested_my_teams_in_syntax(self): user_id=None, ) - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - f"assigned_or_suggested:[#{self.team.slug}]", - [group, my_team_group], - [group1, group2, assigned_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[me, none]", - [group, group1, group2, assigned_group], - [my_team_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[my_teams, none]", - [group, group1, group2, my_team_group], - [assigned_to_other_group, assigned_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[me, my_teams, none]", - [group, group1, group2, my_team_group, assigned_group], - [assigned_to_other_group], - user=self.user, - ) + self.run_test_query( + f"assigned_or_suggested:[#{self.team.slug}]", + [group, my_team_group], + [group1, group2, assigned_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[me, none]", + [group, group1, group2, assigned_group], + [my_team_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[my_teams, none]", + [group, group1, group2, my_team_group], + [assigned_to_other_group, assigned_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[me, my_teams, none]", + [group, group1, group2, my_team_group, assigned_group], + [assigned_to_other_group], + user=self.user, + ) not_me = self.create_user(email="notme@sentry.io") GroupOwner.objects.create( @@ -1754,67 +1720,63 @@ def 
test_assigned_or_suggested_my_teams_in_syntax(self): user_id=not_me.id, ) - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - "assigned_or_suggested:[me, none]", - [group, group1, assigned_group], - [group2, my_team_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[my_teams, none]", - [group, group1, my_team_group], - [group2, assigned_group, assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - "assigned_or_suggested:[me, my_teams, none]", - [group, group1, my_team_group, assigned_group], - [group2, assigned_to_other_group], - user=self.user, - ) + self.run_test_query( + "assigned_or_suggested:[me, none]", + [group, group1, assigned_group], + [group2, my_team_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[my_teams, none]", + [group, group1, my_team_group], + [group2, assigned_group, assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + "assigned_or_suggested:[me, my_teams, none]", + [group, group1, my_team_group, assigned_group], + [group2, assigned_to_other_group], + user=self.user, + ) GroupOwner.objects.filter(group=group, user_id=self.user.id).delete() - # after the change to me -> (me + my_teams) - with self.feature("organizations:assign-to-me"): - self.run_test_query( - f"assigned_or_suggested:[me, none, #{self.team.slug}]", - [group, group1, assigned_group, my_team_group], - [assigned_to_other_group, group2], - user=self.user, - ) - self.run_test_query( - f"assigned_or_suggested:[my_teams, none, #{self.team.slug}]", - [group, group1, my_team_group], - [assigned_to_other_group, group2, assigned_group], - user=self.user, - ) - self.run_test_query( - f"assigned_or_suggested:[me, my_teams, none, #{self.team.slug}]", - [group, group1, my_team_group, assigned_group], - [assigned_to_other_group, group2], - user=self.user, - ) - self.run_test_query( 
- f"assigned_or_suggested:[me, none, #{self.team.slug}, {not_me.email}]", - [group, group1, group2, assigned_group, my_team_group], - [assigned_to_other_group], - user=self.user, - ) - self.run_test_query( - f"assigned_or_suggested:[my_teams, none, #{self.team.slug}, {not_me.email}]", - [group, group1, group2, my_team_group], - [assigned_to_other_group, assigned_group], - user=self.user, - ) - self.run_test_query( - f"assigned_or_suggested:[me, my_teams, none, #{self.team.slug}, {not_me.email}]", - [group, group1, group2, my_team_group, assigned_group], - [assigned_to_other_group], - user=self.user, - ) + self.run_test_query( + f"assigned_or_suggested:[me, none, #{self.team.slug}]", + [group, group1, assigned_group, my_team_group], + [assigned_to_other_group, group2], + user=self.user, + ) + self.run_test_query( + f"assigned_or_suggested:[my_teams, none, #{self.team.slug}]", + [group, group1, my_team_group], + [assigned_to_other_group, group2, assigned_group], + user=self.user, + ) + self.run_test_query( + f"assigned_or_suggested:[me, my_teams, none, #{self.team.slug}]", + [group, group1, my_team_group, assigned_group], + [assigned_to_other_group, group2], + user=self.user, + ) + self.run_test_query( + f"assigned_or_suggested:[me, none, #{self.team.slug}, {not_me.email}]", + [group, group1, group2, assigned_group, my_team_group], + [assigned_to_other_group], + user=self.user, + ) + self.run_test_query( + f"assigned_or_suggested:[my_teams, none, #{self.team.slug}, {not_me.email}]", + [group, group1, group2, my_team_group], + [assigned_to_other_group, assigned_group], + user=self.user, + ) + self.run_test_query( + f"assigned_or_suggested:[me, my_teams, none, #{self.team.slug}, {not_me.email}]", + [group, group1, group2, my_team_group, assigned_group], + [assigned_to_other_group], + user=self.user, + ) def test_assigned_to_with_environment(self): results = self.make_query( From 8ac1a44cdacbafa67be78d45673505af0e09a466 Mon Sep 17 00:00:00 2001 From: Alberto Leal Date: 
Mon, 17 Jul 2023 22:19:05 -0400 Subject: [PATCH 62/67] chore(hybrid-cloud): Add SENTRY_CONTROL_ADDRESS and SENTRY_SUBNET_SECRET to server.py (#53026) --- src/sentry/conf/server.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 16cbbc897514c9..90cfd55a942c07 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -632,6 +632,13 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: # Fallback region name for monolith deployments SENTRY_MONOLITH_REGION: str = "--monolith--" +# Control silo address (public or private). +# Usecases include sending requests to the Integration Proxy Endpoint. +SENTRY_CONTROL_ADDRESS = os.environ.get("SENTRY_CONTROL_ADDRESS", None) + +# The key used for generating or verifying the HMAC signature for Integration Proxy Endpoint requests. +SENTRY_SUBNET_SECRET = os.environ.get("SENTRY_SUBNET_SECRET", None) + # Queue configuration from kombu import Exchange, Queue From 8b1517a5f0d65d1e0634263fae936689ddc5b1fc Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Tue, 18 Jul 2023 09:11:49 +0200 Subject: [PATCH 63/67] ref(getting-started-docs): Migrate kotlin doc to sentry main repo (#52966) --- .../gettingStartedDoc/sdkDocumentation.tsx | 1 + .../gettingStartedDocs/kotlin/kotlin.spec.tsx | 20 ++ .../app/gettingStartedDocs/kotlin/kotlin.tsx | 182 ++++++++++++++++++ 3 files changed, 203 insertions(+) create mode 100644 static/app/gettingStartedDocs/kotlin/kotlin.spec.tsx create mode 100644 static/app/gettingStartedDocs/kotlin/kotlin.tsx diff --git a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx index e9efcf0b1a877e..7d5d5c70698617 100644 --- a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx @@ -29,6 +29,7 @@ export const migratedDocs = [ 'minidump', 'native', 
'native-qt', + 'kotlin', 'node', ]; diff --git a/static/app/gettingStartedDocs/kotlin/kotlin.spec.tsx b/static/app/gettingStartedDocs/kotlin/kotlin.spec.tsx new file mode 100644 index 00000000000000..c51da6c29a8e4f --- /dev/null +++ b/static/app/gettingStartedDocs/kotlin/kotlin.spec.tsx @@ -0,0 +1,20 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step'; + +import {GettingStartedWithKotlin, steps} from './kotlin'; + +describe('GettingStartedWithKotlin', function () { + it('renders doc correctly', function () { + const {container} = render(); + + // Steps + for (const step of steps()) { + expect( + screen.getByRole('heading', {name: step.title ?? StepTitle[step.type]}) + ).toBeInTheDocument(); + } + + expect(container).toSnapshot(); + }); +}); diff --git a/static/app/gettingStartedDocs/kotlin/kotlin.tsx b/static/app/gettingStartedDocs/kotlin/kotlin.tsx new file mode 100644 index 00000000000000..76c17022ecf563 --- /dev/null +++ b/static/app/gettingStartedDocs/kotlin/kotlin.tsx @@ -0,0 +1,182 @@ +import {Fragment} from 'react'; + +import ExternalLink from 'sentry/components/links/externalLink'; +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {t, tct} from 'sentry/locale'; + +// Configuration Start +const introduction = tct( + "Sentry supports Kotlin for both JVM and [Android. This wizard guides you through set up in the JVM scenario. If you're interested in [strong:Android], head over to the [gettingStartedWithAndroidLink:Getting Started] for that SDK instead. At its core, Sentry for Java provides a raw client for sending events to Sentry. 
If you use [strong:Spring Boot, Spring, Logback, JUL, or Log4j2], head over to our [gettingStartedWithJavaLink:Getting Started for Sentry Java].", + { + gettingStartedWithAndroidLink: ( + + ), + gettingStartedWithJavaLink: ( + + ), + strong: , + } +); + +export const steps = ({ + dsn, +}: { + dsn?: string; +} = {}): LayoutProps['steps'] => [ + { + type: StepType.INSTALL, + description: t('Install the SDK via Gradle or Maven:'), + configurations: [ + { + language: 'groovy', + description: ( +

+ {tct('For [strong:Gradle], add to your [code:build.gradle] file:', { + strong: , + code: , + })} +

+ ), + code: ` +// Make sure mavenCentral is there. +repositories { + mavenCentral() +} + +dependencies { + implementation 'io.sentry:sentry:{{@inject packages.version('sentry.java', '4.0.0') }}' +} + `, + }, + { + language: 'xml', + description: ( +

+ {tct('For [strong:Maven], add to your [code:pom.xml] file:', { + strong: , + code: , + })} +

+ ), + code: ` + + io.sentry + sentry + 6.25.0 + + `, + }, + ], + }, + { + type: StepType.CONFIGURE, + description: ( +

+ {tct("Configure Sentry as soon as possible in your application's lifecycle:", { + code: , + })} +

+ ), + configurations: [ + { + language: 'kotlin', + code: ` +import io.sentry.Sentry + +Sentry.init { options -> + options.dsn = "${dsn}" + // Set tracesSampleRate to 1.0 to capture 100% of transactions for performance monitoring. + // We recommend adjusting this value in production. + options.tracesSampleRate = 1.0 + // When first trying Sentry it's good to see what the SDK is doing: + options.isDebug = true +} + `, + }, + ], + }, + { + type: StepType.VERIFY, + description: ( +

+ {tct( + 'Trigger your first event from your development environment by intentionally creating an error with the [code:Sentry#captureException] method, to test that everything is working:', + {code: } + )} +

+ ), + configurations: [ + { + language: 'kotlin', + code: ` +import java.lang.Exception +import io.sentry.Sentry + +try { + throw Exception("This is a test.") +} catch (e: Exception) { + Sentry.captureException(e) +} + `, + }, + ], + additionalInfo: ( + + {t( + "If you're new to Sentry, use the email alert to access your account and complete a product tour." + )} +

+ {t( + "If you're an existing user and have disabled alerts, you won't receive this email." + )} +

+
+ ), + }, + { + title: t('Measure Performance'), + description: t('You can capture transactions using the SDK. For example:'), + configurations: [ + { + language: 'kotlin', + code: ` +import io.sentry.Sentry +import io.sentry.SpanStatus + +// A good name for the transaction is key, to help identify what this is about +val transaction = Sentry.startTransaction("processOrderBatch()", "task") +try { + processOrderBatch() +} catch (e: Exception) { + transaction.throwable = e + transaction.status = SpanStatus.INTERNAL_ERROR +throw e +} finally { + transaction.finish(); +} + `, + }, + ], + additionalInfo: ( +

+ {tct( + 'For more information about the API and automatic instrumentations included in the SDK, visit the docs.', + { + docsLink: ( + + ), + } + )} +

+ ), + }, +]; +// Configuration End + +export function GettingStartedWithKotlin({dsn, ...props}: ModuleProps) { + return ; +} + +export default GettingStartedWithKotlin; From 0845067c676fe6c846fecc0d9b71d0887e86908c Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Tue, 18 Jul 2023 09:13:28 +0200 Subject: [PATCH 64/67] ref(getting-started-docs): Migrate symfony doc to sentry main repo (#52970) --- .../gettingStartedDoc/sdkDocumentation.tsx | 1 + .../gettingStartedDocs/php/symfony.spec.tsx | 20 ++ static/app/gettingStartedDocs/php/symfony.tsx | 278 ++++++++++++++++++ 3 files changed, 299 insertions(+) create mode 100644 static/app/gettingStartedDocs/php/symfony.spec.tsx create mode 100644 static/app/gettingStartedDocs/php/symfony.tsx diff --git a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx index 7d5d5c70698617..c057a48500db91 100644 --- a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx @@ -24,6 +24,7 @@ export const migratedDocs = [ 'java-spring-boot', 'php', 'php-laravel', + 'php-symfony', 'go', 'rust', 'minidump', diff --git a/static/app/gettingStartedDocs/php/symfony.spec.tsx b/static/app/gettingStartedDocs/php/symfony.spec.tsx new file mode 100644 index 00000000000000..302d1e49cf8c1e --- /dev/null +++ b/static/app/gettingStartedDocs/php/symfony.spec.tsx @@ -0,0 +1,20 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step'; + +import {GettingStartedWithSymfony, steps} from './symfony'; + +describe('GettingStartedWithSymfony', function () { + it('renders doc correctly', function () { + const {container} = render(); + + // Steps + for (const step of steps()) { + expect( + screen.getByRole('heading', {name: step.title ?? 
StepTitle[step.type]}) + ).toBeInTheDocument(); + } + + expect(container).toSnapshot(); + }); +}); diff --git a/static/app/gettingStartedDocs/php/symfony.tsx b/static/app/gettingStartedDocs/php/symfony.tsx new file mode 100644 index 00000000000000..1f80799e6f0755 --- /dev/null +++ b/static/app/gettingStartedDocs/php/symfony.tsx @@ -0,0 +1,278 @@ +import {Fragment} from 'react'; + +import ExternalLink from 'sentry/components/links/externalLink'; +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {t, tct} from 'sentry/locale'; + +// Configuration Start +const introduction = tct( + 'Symfony is supported via the [code:sentry-symfony] package as a native bundle.', + {code: } +); + +export const steps = ({ + dsn, +}: { + dsn?: string; +} = {}): LayoutProps['steps'] => [ + { + type: StepType.INSTALL, + configurations: [ + { + language: 'bash', + description: ( +

+ {tct('Install the [code:sentry/sentry-symfony] bundle:', {code: })} +

+ ), + code: 'composer require sentry/sentry-symfony', + }, + { + language: 'yaml', + description: ( +

+ {tct( + 'Due to a bug in all versions below "6.0" of the [code:SensioFrameworkExtraBundle] bundle, you will likely receive an error during the execution of the command above related to the missing [code:NyholmPsr7FactoryPsr17Factory] class. To workaround the issue, if you are not using the PSR-7 bridge, please change the configuration of that bundle as follows:', + {code: } + )} +

+ ), + code: ` +sensio_framework_extra: + psr_message: + enabled: false + `, + additionalInfo: ( +

+ {tct( + 'For more details about the issue see [link:https://github.com/sensiolabs/SensioFrameworkExtraBundle/pull/710].', + { + link: ( + + ), + } + )} +

+ ), + }, + ], + }, + { + type: StepType.CONFIGURE, + configurations: [ + { + description: ( +

+ {tct('Add your DSN to [code:config/packages/sentry.yaml]:', {code: })} +

+ ), + language: 'php', + code: ` +sentry: + dsn: "%env(${dsn})%" + `, + }, + { + description:

{tct('And in your [code:.env] file:', {code: })}

, + language: 'plain', + code: ` +###> sentry/sentry-symfony ### +SENTRY_DSN="${dsn}" +###< sentry/sentry-symfony ### + `, + }, + ], + }, + { + type: StepType.VERIFY, + description: ( +

+ {tct( + 'To test that both logger error and exception are correctly sent to [sentryLink:sentry.io], you can create the following controller:', + { + sentryLink: , + } + )} +

+ ), + configurations: [ + { + language: 'php', + code: ` +logger = $logger; + } + + /** + * @Route(name="sentry_test", path="/_sentry-test") + */ + public function testLog() + { + // the following code will test if monolog integration logs to sentry + $this->logger->error('My custom logged error.'); + + // the following code will test if an uncaught exception logs to sentry + throw new \RuntimeException('Example exception.'); + } +} + `, + }, + ], + additionalInfo: ( +

+ {tct( + "After you visit the [code:/_sentry-test page], you can view and resolve the recorded error by logging into [sentryLink:sentry.io] and opening your project. Clicking on the error's title will open a page where you can see detailed information and mark it as resolved.", + {sentryLink: , code: } + )} +

+ ), + }, + { + title: t('Performance monitoring'), + description: ( + + {t('Performance monitoring integrations to support tracing')} +

+ {t( + 'The process of logging the events that took place during a request, often across multiple services are enabled by default. To use them, update to the latest version of the SDK.' + )} +

+

+ {tct( + 'These integrations hook into critical paths of the framework and of the vendors. As a result, there may be a performance penalty. To disable tracing, please see the [integrationDocumentationLink:Integrations documentation].', + { + integrationDocumentationLink: ( + + ), + } + )} +

+
+ ), + configurations: [ + { + description: ( +

+ {tct( + "If you [strong:are not] using Symfony Flex, you'll also need to enable the bundle in [code:config/bundles.php]:", + { + code: , + strong: , + } + )} +

+ ), + language: 'php', + code: ` + ['all' => true], + ]; + `, + }, + ], + }, + { + title: t('Monolog Integration'), + configurations: [ + { + description: ( +

+ {tct( + 'If you are using [monologLink:Monolog] to report events instead of the typical error listener approach, you need this additional configuration to log the errors correctly:', + { + monologLink: , + } + )} +

+ ), + language: 'yaml', + code: ` +sentry: + register_error_listener: false # Disables the ErrorListener to avoid duplicated log in sentry + register_error_handler: false # Disables the ErrorListener, ExceptionListener and FatalErrorListener integrations of the base PHP SDK + +monolog: + handlers: + sentry: + type: sentry + level: !php/const Monolog\Logger::ERROR + hub_id: Sentry\State\HubInterface + `, + }, + { + description: ( +

+ {tct( + 'f you are using a version of [monologBundleLink:MonologBundle] prior to [code:3.7], you need to configure the handler as a service instead:', + { + monologBundleLink: ( + + ), + code: , + } + )} +

+ ), + language: 'yaml', + code: ` +monolog: + handlers: + sentry: + type: service + id: Sentry\Monolog\Handler + +services: + Sentry\Monolog\Handler: + arguments: + $hub: '@Sentry\State\HubInterface' + $level: !php/const Monolog\Logger::ERROR + `, + }, + { + description: ( +

+ {tct( + 'Additionally, you can register the [code:PsrLogMessageProcessor] to resolve PSR-3 placeholders in reported messages:', + { + code: , + } + )} +

+ ), + language: 'yaml', + code: ` +services: + Monolog\Processor\PsrLogMessageProcessor: + tags: { name: monolog.processor, handler: sentry } + `, + }, + ], + }, +]; +// Configuration End + +export function GettingStartedWithSymfony({dsn, ...props}: ModuleProps) { + return ; +} + +export default GettingStartedWithSymfony; From 2f1a89eca4434a5cb052746323390dfd1f5d3a0f Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Tue, 18 Jul 2023 09:44:53 +0200 Subject: [PATCH 65/67] ref(getting-started-docs): Migrate ruby doc to sentry main repo (#52973) --- .../gettingStartedDoc/sdkDocumentation.tsx | 3 + .../app/gettingStartedDocs/ruby/rack.spec.tsx | 20 +++++ static/app/gettingStartedDocs/ruby/rack.tsx | 71 +++++++++++++++ .../gettingStartedDocs/ruby/rails.spec.tsx | 20 +++++ static/app/gettingStartedDocs/ruby/rails.tsx | 89 +++++++++++++++++++ .../app/gettingStartedDocs/ruby/ruby.spec.tsx | 20 +++++ static/app/gettingStartedDocs/ruby/ruby.tsx | 85 ++++++++++++++++++ 7 files changed, 308 insertions(+) create mode 100644 static/app/gettingStartedDocs/ruby/rack.spec.tsx create mode 100644 static/app/gettingStartedDocs/ruby/rack.tsx create mode 100644 static/app/gettingStartedDocs/ruby/rails.spec.tsx create mode 100644 static/app/gettingStartedDocs/ruby/rails.tsx create mode 100644 static/app/gettingStartedDocs/ruby/ruby.spec.tsx create mode 100644 static/app/gettingStartedDocs/ruby/ruby.tsx diff --git a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx index c057a48500db91..cea134a040786f 100644 --- a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx @@ -30,6 +30,9 @@ export const migratedDocs = [ 'minidump', 'native', 'native-qt', + 'ruby', + 'ruby-rails', + 'ruby-rack', 'kotlin', 'node', ]; diff --git a/static/app/gettingStartedDocs/ruby/rack.spec.tsx 
b/static/app/gettingStartedDocs/ruby/rack.spec.tsx new file mode 100644 index 00000000000000..2bc45b6ff645ce --- /dev/null +++ b/static/app/gettingStartedDocs/ruby/rack.spec.tsx @@ -0,0 +1,20 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step'; + +import {GettingStartedWithRubyRack, steps} from './rack'; + +describe('GettingStartedWithRubyRack', function () { + it('renders doc correctly', function () { + const {container} = render(); + + // Steps + for (const step of steps()) { + expect( + screen.getByRole('heading', {name: step.title ?? StepTitle[step.type]}) + ).toBeInTheDocument(); + } + + expect(container).toSnapshot(); + }); +}); diff --git a/static/app/gettingStartedDocs/ruby/rack.tsx b/static/app/gettingStartedDocs/ruby/rack.tsx new file mode 100644 index 00000000000000..9a048dd01368b6 --- /dev/null +++ b/static/app/gettingStartedDocs/ruby/rack.tsx @@ -0,0 +1,71 @@ +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {tct} from 'sentry/locale'; + +// Configuration Start +export const steps = ({ + dsn, +}: { + dsn?: string; +} = {}): LayoutProps['steps'] => [ + { + type: StepType.INSTALL, + description: ( +

+ {tct('Install the SDK via Rubygems by adding it to your [code:Gemfile]:', { + code: , + })} +

+ ), + configurations: [ + { + language: 'ruby', + code: `gem "sentry-ruby"`, + }, + ], + }, + { + type: StepType.CONFIGURE, + description: ( +

+ {tct( + 'Add use [code:Sentry::Rack::CaptureExceptions] to your [code:config.ru] or other rackup file (this is automatically inserted in Rails):', + { + code: , + } + )} +

+ ), + configurations: [ + { + language: 'ruby', + code: ` +require 'sentry-ruby' + +Sentry.init do |config| + config.dsn = '${dsn}' + + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + # We recommend adjusting this value in production. + config.traces_sample_rate = 1.0 + # or + config.traces_sampler = lambda do |context| + true + end +end + +use Sentry::Rack::CaptureExceptions + `, + }, + ], + }, +]; +// Configuration End + +export function GettingStartedWithRubyRack({dsn, ...props}: ModuleProps) { + return ; +} + +export default GettingStartedWithRubyRack; diff --git a/static/app/gettingStartedDocs/ruby/rails.spec.tsx b/static/app/gettingStartedDocs/ruby/rails.spec.tsx new file mode 100644 index 00000000000000..a28ab9b8db675c --- /dev/null +++ b/static/app/gettingStartedDocs/ruby/rails.spec.tsx @@ -0,0 +1,20 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step'; + +import {GettingStartedWithRubyRails, steps} from './rails'; + +describe('GettingStartedWithRubyRails', function () { + it('renders doc correctly', function () { + const {container} = render(); + + // Steps + for (const step of steps()) { + expect( + screen.getByRole('heading', {name: step.title ?? 
StepTitle[step.type]}) + ).toBeInTheDocument(); + } + + expect(container).toSnapshot(); + }); +}); diff --git a/static/app/gettingStartedDocs/ruby/rails.tsx b/static/app/gettingStartedDocs/ruby/rails.tsx new file mode 100644 index 00000000000000..b85c217a9e3226 --- /dev/null +++ b/static/app/gettingStartedDocs/ruby/rails.tsx @@ -0,0 +1,89 @@ +import {Fragment} from 'react'; + +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {t, tct} from 'sentry/locale'; + +// Configuration Start +const introduction = ( + + {t('In Rails, all uncaught exceptions will be automatically reported.')} + {t('We support Rails 5 and newer.')} + +); +export const steps = ({ + dsn, +}: { + dsn?: string; +} = {}): LayoutProps['steps'] => [ + { + type: StepType.INSTALL, + description: ( +

+ {tct('Add [code:sentry-ruby] and [code:sentry-rails] to your [code:Gemfile]:', { + code: , + })} +

+ ), + configurations: [ + { + language: 'ruby', + code: ` +gem "sentry-ruby" +gem "sentry-rails" + `, + }, + ], + }, + { + type: StepType.CONFIGURE, + description: ( +

+ {tct('Initialize the SDK within your [code:config/initializers/sentry.rb]:', { + code: , + })} +

+ ), + configurations: [ + { + language: 'ruby', + code: ` +Sentry.init do |config| + config.dsn = '${dsn}' + config.breadcrumbs_logger = [:active_support_logger, :http_logger] + + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + # We recommend adjusting this value in production. + config.traces_sample_rate = 1.0 + # or + config.traces_sampler = lambda do |context| + true + end +end + `, + }, + ], + }, + { + title: t('Caveats'), + description: ( +

+ {tct( + 'Currently, custom exception applications [code:(config.exceptions_app)] are not supported. If you are using a custom exception app, you must manually integrate Sentry yourself.', + { + code: , + } + )} +

+ ), + }, +]; +// Configuration End + +export function GettingStartedWithRubyRails({dsn, ...props}: ModuleProps) { + return ; +} + +export default GettingStartedWithRubyRails; diff --git a/static/app/gettingStartedDocs/ruby/ruby.spec.tsx b/static/app/gettingStartedDocs/ruby/ruby.spec.tsx new file mode 100644 index 00000000000000..3c940bde800b88 --- /dev/null +++ b/static/app/gettingStartedDocs/ruby/ruby.spec.tsx @@ -0,0 +1,20 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step'; + +import {GettingStartedWithRuby, steps} from './ruby'; + +describe('GettingStartedWithRuby', function () { + it('renders doc correctly', function () { + const {container} = render(); + + // Steps + for (const step of steps()) { + expect( + screen.getByRole('heading', {name: step.title ?? StepTitle[step.type]}) + ).toBeInTheDocument(); + } + + expect(container).toSnapshot(); + }); +}); diff --git a/static/app/gettingStartedDocs/ruby/ruby.tsx b/static/app/gettingStartedDocs/ruby/ruby.tsx new file mode 100644 index 00000000000000..ca47af80ab46b3 --- /dev/null +++ b/static/app/gettingStartedDocs/ruby/ruby.tsx @@ -0,0 +1,85 @@ +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {t, tct} from 'sentry/locale'; + +// Configuration Start +export const steps = ({ + dsn, +}: { + dsn?: string; +} = {}): LayoutProps['steps'] => [ + { + type: StepType.INSTALL, + description: ( +

+ {tct( + 'Sentry Ruby comes as a gem and is straightforward to install. If you are using Bundler just add this to your [code:Gemfile]:', + {code: } + )} +

+ ), + configurations: [ + { + language: 'ruby', + code: 'gem "sentry-ruby"', + }, + ], + }, + { + type: StepType.CONFIGURE, + description: ( +

+ {tct( + 'To use Sentry Ruby all you need is your DSN. Like most Sentry libraries it will honor the [sentryDSN:SENTRY_DSN] environment variable. You can find it on the project settings page under API Keys. You can either export it as environment variable or manually configure it with [sentryInit:Sentry.init]:', + {sentryDSN: , sentryInit: } + )} +

+ ), + configurations: [ + { + language: 'ruby', + code: ` +Sentry.init do |config| + config.dsn = '${dsn}' + + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + # We recommend adjusting this value in production. + config.traces_sample_rate = 1.0 + # or + config.traces_sampler = lambda do |context| + true + end +end + `, + }, + ], + }, + { + type: StepType.VERIFY, + description: t('You can then report errors or messages to Sentry:'), + configurations: [ + { + language: 'ruby', + + code: ` +begin + 1 / 0 +rescue ZeroDivisionError => exception + Sentry.capture_exception(exception) +end + +Sentry.capture_message("test message") + `, + }, + ], + }, +]; +// Configuration End + +export function GettingStartedWithRuby({dsn, ...props}: ModuleProps) { + return ; +} + +export default GettingStartedWithRuby; From ab710c7b7453a53377f9544cfc6f4046adaf9294 Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Tue, 18 Jul 2023 09:45:11 +0200 Subject: [PATCH 66/67] ref(getting-started-docs): Migrate python fra. 
docs to sentry main repo (#52975) --- .../gettingStartedDoc/sdkDocumentation.tsx | 4 + .../python/starlette.spec.tsx | 20 ++++ .../gettingStartedDocs/python/starlette.tsx | 111 ++++++++++++++++++ .../python/tornado.spec.tsx | 20 ++++ .../app/gettingStartedDocs/python/tornado.tsx | 80 +++++++++++++ .../app/gettingStartedDocs/python/tryton.tsx | 89 ++++++++++++++ .../gettingStartedDocs/python/wsgi.spec.tsx | 20 ++++ static/app/gettingStartedDocs/python/wsgi.tsx | 64 ++++++++++ 8 files changed, 408 insertions(+) create mode 100644 static/app/gettingStartedDocs/python/starlette.spec.tsx create mode 100644 static/app/gettingStartedDocs/python/starlette.tsx create mode 100644 static/app/gettingStartedDocs/python/tornado.spec.tsx create mode 100644 static/app/gettingStartedDocs/python/tornado.tsx create mode 100644 static/app/gettingStartedDocs/python/tryton.tsx create mode 100644 static/app/gettingStartedDocs/python/wsgi.spec.tsx create mode 100644 static/app/gettingStartedDocs/python/wsgi.tsx diff --git a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx index cea134a040786f..ba31b2c1e3f279 100644 --- a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx @@ -20,6 +20,10 @@ export const migratedDocs = [ 'javascript', 'python-django', 'python-flask', + 'python-wsgi', + 'python-tryton', + 'python-tornado', + 'python-starlette', 'react-native', 'java-spring-boot', 'php', diff --git a/static/app/gettingStartedDocs/python/starlette.spec.tsx b/static/app/gettingStartedDocs/python/starlette.spec.tsx new file mode 100644 index 00000000000000..cb07fe3381e80f --- /dev/null +++ b/static/app/gettingStartedDocs/python/starlette.spec.tsx @@ -0,0 +1,20 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {StepTitle} from 
'sentry/components/onboarding/gettingStartedDoc/step'; + +import {GettingStartedWithStarlette, steps} from './starlette'; + +describe('GettingStartedWithDjango', function () { + it('renders doc correctly', function () { + const {container} = render(); + + // Steps + for (const step of steps()) { + expect( + screen.getByRole('heading', {name: step.title ?? StepTitle[step.type]}) + ).toBeInTheDocument(); + } + + expect(container).toSnapshot(); + }); +}); diff --git a/static/app/gettingStartedDocs/python/starlette.tsx b/static/app/gettingStartedDocs/python/starlette.tsx new file mode 100644 index 00000000000000..2a2567a07864f7 --- /dev/null +++ b/static/app/gettingStartedDocs/python/starlette.tsx @@ -0,0 +1,111 @@ +import ExternalLink from 'sentry/components/links/externalLink'; +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {t, tct} from 'sentry/locale'; + +// Configuration Start +const introduction = tct( + 'The Starlette integration adds support for the Starlette Framework.', + { + link: , + } +); +export const steps = ({ + dsn, +}: { + dsn?: string; +} = {}): LayoutProps['steps'] => [ + { + type: StepType.INSTALL, + description: ( +

+ {tct('Install [code:sentry-sdk] from PyPI with the [code:starlette] extra:', { + code: , + })} +

+ ), + configurations: [ + { + language: 'bash', + code: "pip install --upgrade 'sentry-sdk[starlette]'", + }, + ], + }, + { + type: StepType.CONFIGURE, + description: ( +

+ {tct( + 'To configure the SDK, initialize it before your app has been initialized. The Sentry SDK automatically enables support for Starlette if you have the [code:starlette] Python package installed in your project. There are no configuration options you need to add when initializing the Sentry SDK as everything works out of the box:', + {code: } + )} +

+ ), + configurations: [ + { + language: 'python', + code: ` +from starlette.applications import Starlette + +import sentry_sdk + + +sentry_sdk.init( + dsn="${dsn}", + + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + # We recommend adjusting this value in production, + traces_sample_rate=1.0, +) + +app = Starlette(routes=[...]) + `, + }, + ], + additionalInfo: ( +

+ {tct( + 'The above configuration captures both error and performance data. To reduce the volume of performance data captured, change [code:traces_sample_rate] to a value between 0 and 1.', + { + code: , + } + )} +

+ ), + }, + { + type: StepType.VERIFY, + description: t( + 'You can easily verify your Sentry installation by creating a route that triggers an error:' + ), + configurations: [ + { + language: 'python', + code: ` +from starlette.applications import Starlette +from starlette.routing import Route + + +async def trigger_error(request): + division_by_zero = 1 / 0 + +app = Starlette(routes=[ + Route("/sentry-debug", trigger_error), +]) + `, + additionalInfo: t( + 'Visiting this route will trigger an error that will be captured by Sentry.' + ), + }, + ], + }, +]; +// Configuration End + +export function GettingStartedWithStarlette({dsn, ...props}: ModuleProps) { + return ; +} + +export default GettingStartedWithStarlette; diff --git a/static/app/gettingStartedDocs/python/tornado.spec.tsx b/static/app/gettingStartedDocs/python/tornado.spec.tsx new file mode 100644 index 00000000000000..55c07396966c19 --- /dev/null +++ b/static/app/gettingStartedDocs/python/tornado.spec.tsx @@ -0,0 +1,20 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step'; + +import {GettingStartedWithTornado, steps} from './tornado'; + +describe('GettingStartedWithTornado', function () { + it('renders doc correctly', function () { + const {container} = render(); + + // Steps + for (const step of steps()) { + expect( + screen.getByRole('heading', {name: step.title ?? 
StepTitle[step.type]}) + ).toBeInTheDocument(); + } + + expect(container).toSnapshot(); + }); +}); diff --git a/static/app/gettingStartedDocs/python/tornado.tsx b/static/app/gettingStartedDocs/python/tornado.tsx new file mode 100644 index 00000000000000..b6144102117c92 --- /dev/null +++ b/static/app/gettingStartedDocs/python/tornado.tsx @@ -0,0 +1,80 @@ +import ExternalLink from 'sentry/components/links/externalLink'; +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {t, tct} from 'sentry/locale'; + +// Configuration Start +const introduction = tct( + 'The Tornado integration adds support for the [link:Tornado Web Framework]. A Tornado version of 5 or greater and Python 3.6 or greater is required.', + { + link: , + } +); +export const steps = ({ + dsn, +}: { + dsn?: string; +} = {}): LayoutProps['steps'] => [ + { + type: StepType.INSTALL, + description:

{tct('Install [code:sentry-sdk] from PyPI:', {code: })}

, + configurations: [ + { + language: 'bash', + code: '$ pip install --upgrade sentry-sdk', + }, + { + description: ( +

+ {tct( + "If you're on Python 3.6, you also need the [code:aiocontextvars] package:", + { + code: , + } + )} +

+ ), + language: 'bash', + code: '$ pip install --upgrade aiocontextvars', + }, + ], + }, + { + type: StepType.CONFIGURE, + description: t('Initialize the SDK before starting the server:'), + configurations: [ + { + language: 'python', + code: ` +import sentry_sdk +from sentry_sdk.integrations.tornado import TornadoIntegration + +sentry_sdk.init( + dsn="${dsn}", + integrations=[ + TornadoIntegration(), + ], + + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + # We recommend adjusting this value in production, + traces_sample_rate=1.0, +) + +# Your app code here, without changes + +class MyHandler(...): +... + `, + }, + ], + }, +]; +// Configuration End + +export function GettingStartedWithTornado({dsn, ...props}: ModuleProps) { + return ; +} + +export default GettingStartedWithTornado; diff --git a/static/app/gettingStartedDocs/python/tryton.tsx b/static/app/gettingStartedDocs/python/tryton.tsx new file mode 100644 index 00000000000000..27853d836e9963 --- /dev/null +++ b/static/app/gettingStartedDocs/python/tryton.tsx @@ -0,0 +1,89 @@ +import ExternalLink from 'sentry/components/links/externalLink'; +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {t, tct} from 'sentry/locale'; + +// Configuration Start +const introduction = ( +

+ {tct('The Tryton integration adds support for the [link:Tryton Framework Server].', { + link: , + })} +

+); + +export const steps = ({ + dsn, +}: { + dsn?: string; +} = {}): LayoutProps['steps'] => [ + { + type: StepType.CONFIGURE, + description: ( +

+ {tct( + 'To configure the SDK, initialize it with the integration in a custom [code:wsgi.py] script:', + { + code: , + } + )} +

+ ), + configurations: [ + { + language: 'python', + code: ` +# wsgi.py +import sentry_sdk +import sentry_sdk.integrations.trytond + +sentry_sdk.init( + dsn="${dsn}", + integrations=[ + sentry_sdk.integrations.trytond.TrytondWSGIIntegration(), + ], + + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + # We recommend adjusting this value in production, + traces_sample_rate=1.0, +) + +from trytond.application import app as application + +# ... + `, + }, + { + description: t( + 'In Tryton>=5.4 an error handler can be registered to respond the client with a custom error message including the Sentry event id instead of a traceback.' + ), + language: 'python', + code: ` +# wsgi.py +# ... + +from trytond.exceptions import TrytonException +from trytond.exceptions import UserError + +@application.error_handler +def _(app, request, e): + if isinstance(e, TrytonException): + return + else: + event_id = sentry_sdk.last_event_id() + data = UserError('Custom message', f'{event_id}{e}') + return app.make_response(request, data) + `, + }, + ], + }, +]; +// Configuration End + +export function GettingStartedWithTryton({dsn, ...props}: ModuleProps) { + return ; +} + +export default GettingStartedWithTryton; diff --git a/static/app/gettingStartedDocs/python/wsgi.spec.tsx b/static/app/gettingStartedDocs/python/wsgi.spec.tsx new file mode 100644 index 00000000000000..c21523a9f12477 --- /dev/null +++ b/static/app/gettingStartedDocs/python/wsgi.spec.tsx @@ -0,0 +1,20 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step'; + +import {GettingStartedWithWSGI, steps} from './wsgi'; + +describe('GettingStartedWithWSGI', function () { + it('renders doc correctly', function () { + const {container} = render(); + + // Steps + for (const step of steps()) { + expect( + screen.getByRole('heading', {name: step.title ?? 
StepTitle[step.type]}) + ).toBeInTheDocument(); + } + + expect(container).toSnapshot(); + }); +}); diff --git a/static/app/gettingStartedDocs/python/wsgi.tsx b/static/app/gettingStartedDocs/python/wsgi.tsx new file mode 100644 index 00000000000000..3cb90fc32f21d4 --- /dev/null +++ b/static/app/gettingStartedDocs/python/wsgi.tsx @@ -0,0 +1,64 @@ +import {Fragment} from 'react'; + +import ExternalLink from 'sentry/components/links/externalLink'; +import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout'; +import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import {t, tct} from 'sentry/locale'; + +// Configuration Start +export const steps = ({ + dsn, +}: { + dsn?: string; +} = {}): LayoutProps['steps'] => [ + { + type: StepType.INSTALL, + description: ( + +

+ {tct( + 'It is recommended to use an [link:integration for your particular WSGI framework if available], as those are easier to use and capture more useful information.', + { + link: ( + + ), + } + )} +

+ {t( + 'If you use a WSGI framework not directly supported by the SDK, or wrote a raw WSGI app, you can use this generic WSGI middleware. It captures errors and attaches a basic amount of information for incoming requests.' + )} +
+ ), + configurations: [ + { + language: 'python', + code: ` +import sentry_sdk +from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware + +from myapp import wsgi_app + +sentry_sdk.init( + dsn="${dsn}", + + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + # We recommend adjusting this value in production, + traces_sample_rate=1.0, +) + +wsgi_app = SentryWsgiMiddleware(wsgi_app) + `, + }, + ], + }, +]; +// Configuration End + +export function GettingStartedWithWSGI({dsn, ...props}: ModuleProps) { + return ; +} + +export default GettingStartedWithWSGI; From 54a1d8faf25587fc3c731d9e4e18089d4c2ccc08 Mon Sep 17 00:00:00 2001 From: Radu Woinaroski <5281987+RaduW@users.noreply.github.com> Date: Tue, 18 Jul 2023 10:05:38 +0200 Subject: [PATCH 67/67] ref(dynamic-sampling) Implement boost_low_volume_transactions with timed iterators (#52976) --- .../tasks/boost_low_volume_projects.py | 5 +- .../tasks/boost_low_volume_transactions.py | 632 ++++++++++-------- .../dynamic_sampling/tasks/collect_orgs.py | 5 +- src/sentry/dynamic_sampling/tasks/common.py | 50 +- src/sentry/options/defaults.py | 2 +- .../test_boost_low_volume_transactions.py | 16 +- .../dynamic_sampling/tasks/test_common.py | 7 +- 7 files changed, 425 insertions(+), 292 deletions(-) diff --git a/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py b/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py index b7ecc786a5fd7c..df72918a5211fd 100644 --- a/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py +++ b/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py @@ -78,12 +78,9 @@ def boost_low_volume_projects() -> None: context = TaskContext("sentry.dynamic_sampling.tasks.boost_low_volume_projects", MAX_SECONDS) fetch_projects_timer = Timer() - iterator_name = GetActiveOrgs.__name__ try: - for orgs in TimedIterator( - context, iterator_name, GetActiveOrgs(max_projects=MAX_PROJECTS_PER_QUERY) - ): + for orgs in 
TimedIterator(context, GetActiveOrgs(max_projects=MAX_PROJECTS_PER_QUERY)): for ( org_id, projects_with_tx_count_and_rates, diff --git a/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py b/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py index 64f0f1c9714245..89251e8da52e2c 100644 --- a/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py +++ b/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py @@ -1,7 +1,7 @@ -import time from datetime import datetime -from typing import Callable, Iterator, List, Optional, Tuple, TypedDict +from typing import Callable, Dict, Iterator, List, Optional, Sequence, Tuple, TypedDict, Union, cast +from sentry_sdk import capture_message, set_extra from snuba_sdk import ( AliasedExpression, Column, @@ -26,11 +26,11 @@ is_sliding_window_enabled, is_sliding_window_org_enabled, ) +from sentry.dynamic_sampling.tasks.common import GetActiveOrgs, TimedIterator, TimeoutException from sentry.dynamic_sampling.tasks.constants import ( BOOST_LOW_VOLUME_TRANSACTIONS_QUERY_INTERVAL, CHUNK_SIZE, DEFAULT_REDIS_CACHE_KEY_TTL, - MAX_ORGS_PER_QUERY, MAX_PROJECTS_PER_QUERY, MAX_SECONDS, ) @@ -41,8 +41,13 @@ set_transactions_resampling_rates, ) from sentry.dynamic_sampling.tasks.helpers.sliding_window import get_sliding_window_sample_rate -from sentry.dynamic_sampling.tasks.logging import log_query_timeout, log_sample_rate_source -from sentry.dynamic_sampling.tasks.utils import dynamic_sampling_task +from sentry.dynamic_sampling.tasks.logging import ( + log_sample_rate_source, + log_task_execution, + log_task_timeout, +) +from sentry.dynamic_sampling.tasks.task_context import DynamicSamplingLogState, TaskContext +from sentry.dynamic_sampling.tasks.utils import Timer, dynamic_sampling_task from sentry.models import Organization from sentry.sentry_metrics import indexer from sentry.snuba.dataset import Dataset, EntityKey @@ -99,22 +104,62 @@ def boost_low_volume_transactions() -> None: 
options.get("dynamic-sampling.prioritise_transactions.num_explicit_small_transactions") ) - for orgs in get_orgs_with_project_counts(): - # get the low and high transactions - for project_transactions in transactions_zip( - fetch_project_transaction_totals(orgs), - fetch_transactions_with_total_volumes( - orgs, - large_transactions=True, - max_transactions=num_big_trans, - ), - fetch_transactions_with_total_volumes( - orgs, - large_transactions=False, - max_transactions=num_small_trans, - ), - ): - boost_low_volume_transactions_of_project.delay(project_transactions) + context = TaskContext( + "sentry.dynamic_sampling.tasks.boost_low_volume_transactions", MAX_SECONDS + ) + + # create global timers for the internal iterators since they are created multiple times, + # and we are interested in the total time. + get_totals_timer = Timer() + get_small_transactions_timer = Timer() + get_big_transactions_timer = Timer() + get_totals_name = "GetTransactionTotals" + get_volumes_small = "GetTransactionVolumes(small)" + get_volumes_big = "GetTransactionVolumes(big)" + + try: + orgs_iterator = TimedIterator(context, GetActiveOrgs(max_projects=MAX_PROJECTS_PER_QUERY)) + for orgs in orgs_iterator: + # get the low and high transactions + totals_it = TimedIterator( + context=context, + inner=FetchProjectTransactionTotals(orgs), + name=get_totals_name, + timer=get_totals_timer, + ) + small_transactions_it = TimedIterator( + context=context, + inner=FetchProjectTransactionVolumes( + orgs, + large_transactions=False, + max_transactions=num_small_trans, + ), + name=get_volumes_small, + timer=get_small_transactions_timer, + ) + big_transactions_it = TimedIterator( + context=context, + inner=FetchProjectTransactionVolumes( + orgs, + large_transactions=True, + max_transactions=num_big_trans, + ), + name=get_volumes_big, + timer=get_big_transactions_timer, + ) + + for project_transactions in transactions_zip( + totals_it, big_transactions_it, small_transactions_it + ): + 
boost_low_volume_transactions_of_project.delay(project_transactions) + except TimeoutException: + set_extra("context-data", context.to_dict()) + log_task_timeout(context) + raise + else: + set_extra("context-data", context.to_dict()) + capture_message("sentry.dynamic_sampling.tasks.boost_low_volume_transactions") + log_task_execution(context) @instrumented_task( @@ -218,172 +263,151 @@ def is_project_identity_before(left: ProjectIdentity, right: ProjectIdentity) -> ) -def get_orgs_with_project_counts( - max_orgs: int = MAX_ORGS_PER_QUERY, max_projects: int = MAX_PROJECTS_PER_QUERY -) -> Iterator[List[int]]: +class FetchProjectTransactionTotals: """ - Fetch organisations in batches. - A batch will return at max max_orgs elements - It will accumulate org ids in the list until either it accumulates max_orgs or the - number of projects in the already accumulated orgs is more than max_projects or there - are no more orgs + Fetches the total number of transactions and the number of distinct transaction types for each + project in the given organisations """ - start_time = time.time() - metric_id = indexer.resolve_shared_org(str(TransactionMRI.COUNT_PER_ROOT_PROJECT.value)) - offset = 0 - - last_result: List[Tuple[int, int]] = [] - while (time.time() - start_time) < MAX_SECONDS: - query = ( - Query( - match=Entity(EntityKey.GenericOrgMetricsCounters.value), - select=[ - Function("uniq", [Column("project_id")], "num_projects"), - Column("org_id"), - ], - groupby=[ - Column("org_id"), - ], - where=[ - Condition( - Column("timestamp"), - Op.GTE, - datetime.utcnow() - BOOST_LOW_VOLUME_TRANSACTIONS_QUERY_INTERVAL, - ), - Condition(Column("timestamp"), Op.LT, datetime.utcnow()), - Condition(Column("metric_id"), Op.EQ, metric_id), - ], - granularity=Granularity(3600), - orderby=[ - OrderBy(Column("org_id"), Direction.ASC), - ], - ) - .set_limit(CHUNK_SIZE + 1) - .set_offset(offset) - ) - request = Request( - dataset=Dataset.PerformanceMetrics.value, app_id="dynamic_sampling", 
query=query - ) - data = raw_snql_query( - request, - referrer=Referrer.DYNAMIC_SAMPLING_COUNTERS_FETCH_PROJECTS_WITH_COUNT_PER_TRANSACTION.value, - )["data"] - count = len(data) - more_results = count > CHUNK_SIZE - offset += CHUNK_SIZE - if more_results: - data = data[:-1] - for row in data: - last_result.append((row["org_id"], row["num_projects"])) - - first_idx = 0 - count_projects = 0 - for idx, (org_id, num_projects) in enumerate(last_result): - count_projects += num_projects - if idx - first_idx >= max_orgs - 1 or count_projects >= max_projects: - # we got to the number of elements desired - yield [o for o, _ in last_result[first_idx : idx + 1]] - first_idx = idx + 1 - count_projects = 0 - - # keep what is left unused from last_result for the next iteration or final result - last_result = last_result[first_idx:] - if not more_results: - break - else: - log_query_timeout( - query="get_orgs_with_project_counts", offset=offset, timeout_seconds=MAX_SECONDS - ) - if len(last_result) > 0: - yield [org_id for org_id, _ in last_result] + def __init__(self, orgs: Sequence[int]): + self.log_state: Optional[DynamicSamplingLogState] = None + transaction_string_id = indexer.resolve_shared_org("transaction") + self.transaction_tag = f"tags_raw[{transaction_string_id}]" + self.metric_id = indexer.resolve_shared_org( + str(TransactionMRI.COUNT_PER_ROOT_PROJECT.value) + ) -def fetch_project_transaction_totals(org_ids: List[int]) -> Iterator[ProjectTransactionsTotals]: - """ - Fetches the total number of transactions and the number of distinct transaction types for each - project in the given organisations - :param org_ids: - :return: an iterator of org_ids - """ - start_time = time.time() - offset = 0 - org_ids = list(org_ids) # just to be sure it is not some other sequence - transaction_string_id = indexer.resolve_shared_org("transaction") - transaction_tag = f"tags_raw[{transaction_string_id}]" - metric_id = 
indexer.resolve_shared_org(str(TransactionMRI.COUNT_PER_ROOT_PROJECT.value)) - more_results = True - - while more_results and (time.time() - start_time) < MAX_SECONDS: - query = ( - Query( - match=Entity(EntityKey.GenericOrgMetricsCounters.value), - select=[ - Function("sum", [Column("value")], "num_transactions"), - Function("uniq", [Column(transaction_tag)], "num_classes"), - Column("org_id"), - Column("project_id"), - ], - groupby=[ - Column("org_id"), - Column("project_id"), - ], - where=[ - Condition( - Column("timestamp"), - Op.GTE, - datetime.utcnow() - BOOST_LOW_VOLUME_TRANSACTIONS_QUERY_INTERVAL, - ), - Condition(Column("timestamp"), Op.LT, datetime.utcnow()), - Condition(Column("metric_id"), Op.EQ, metric_id), - Condition(Column("org_id"), Op.IN, org_ids), - ], - granularity=Granularity(3600), - orderby=[ - OrderBy(Column("org_id"), Direction.ASC), - OrderBy(Column("project_id"), Direction.ASC), - ], + self.org_ids = list(orgs) + self.offset = 0 + self.has_more_results = True + self.cache: List[Dict[str, Union[int, float]]] = [] + self.last_org_id: Optional[int] = None + + def __iter__(self): + return self + + def __next__(self): + + self._ensure_log_state() + assert self.log_state is not None + + if not self._cache_empty(): + return self._get_from_cache() + + if self.has_more_results: + query = ( + Query( + match=Entity(EntityKey.GenericOrgMetricsCounters.value), + select=[ + Function("sum", [Column("value")], "num_transactions"), + Function("uniq", [Column(self.transaction_tag)], "num_classes"), + Column("org_id"), + Column("project_id"), + ], + groupby=[ + Column("org_id"), + Column("project_id"), + ], + where=[ + Condition( + Column("timestamp"), + Op.GTE, + datetime.utcnow() - BOOST_LOW_VOLUME_TRANSACTIONS_QUERY_INTERVAL, + ), + Condition(Column("timestamp"), Op.LT, datetime.utcnow()), + Condition(Column("metric_id"), Op.EQ, self.metric_id), + Condition(Column("org_id"), Op.IN, self.org_ids), + ], + granularity=Granularity(3600), + orderby=[ + 
OrderBy(Column("org_id"), Direction.ASC), + OrderBy(Column("project_id"), Direction.ASC), + ], + ) + .set_limit(CHUNK_SIZE + 1) + .set_offset(self.offset) ) - .set_limit(CHUNK_SIZE + 1) - .set_offset(offset) - ) - request = Request( - dataset=Dataset.PerformanceMetrics.value, app_id="dynamic_sampling", query=query - ) - data = raw_snql_query( - request, - referrer=Referrer.DYNAMIC_SAMPLING_COUNTERS_FETCH_PROJECTS_WITH_TRANSACTION_TOTALS.value, - )["data"] - count = len(data) - more_results = count > CHUNK_SIZE - offset += CHUNK_SIZE + request = Request( + dataset=Dataset.PerformanceMetrics.value, app_id="dynamic_sampling", query=query + ) + data = raw_snql_query( + request, + referrer=Referrer.DYNAMIC_SAMPLING_COUNTERS_FETCH_PROJECTS_WITH_TRANSACTION_TOTALS.value, + )["data"] + count = len(data) + self.has_more_results = count > CHUNK_SIZE + self.offset += CHUNK_SIZE - if more_results: - data = data[:-1] + if self.has_more_results: + data = data[:-1] - for row in data: - proj_id = row["project_id"] - org_id = row["org_id"] - num_transactions = row["num_transactions"] - num_classes = row["num_classes"] - yield { - "project_id": proj_id, - "org_id": org_id, - "total_num_transactions": num_transactions, - "total_num_classes": num_classes, - } + self.log_state.num_rows_total += count + self.log_state.num_db_calls += 1 - else: - log_query_timeout( - query="fetch_project_transaction_totals", offset=offset, timeout_seconds=MAX_SECONDS - ) + self.cache.extend(data) - return None + return self._get_from_cache() + def _get_from_cache(self): -def fetch_transactions_with_total_volumes( - org_ids: List[int], large_transactions: bool, max_transactions: int -) -> Iterator[ProjectTransactions]: + if self._cache_empty(): + raise StopIteration() + + self._ensure_log_state() + + assert self.log_state is not None + + row = self.cache.pop(0) + proj_id = row["project_id"] + org_id = row["org_id"] + num_transactions = row["num_transactions"] + num_classes = row["num_classes"] + + 
self.log_state.num_projects += 1 + + if self.last_org_id != org_id: + self.last_org_id = cast(int, org_id) + self.log_state.num_orgs += 1 + + return { + "project_id": proj_id, + "org_id": org_id, + "total_num_transactions": num_transactions, + "total_num_classes": num_classes, + } + + def _cache_empty(self): + return not self.cache + + def _ensure_log_state(self): + if self.log_state is None: + self.log_state = DynamicSamplingLogState() + + def get_current_state(self): + """ + Returns the current state of the iterator (how many orgs and projects it has iterated over) + + part of the ContexIterator protocol + + """ + self._ensure_log_state() + + return self.log_state + + def set_current_state(self, log_state: DynamicSamplingLogState) -> None: + """ + Set the log state from outside (typically immediately after creation) + + part of the ContextIterator protocol + + This is typically used when multiple iterators are concatenated into one logical operation + in order to accumulate results into one state. 
+ """ + self.log_state = log_state + + +class FetchProjectTransactionVolumes: """ Fetch transactions for all orgs and all projects with pagination orgs and projects with count per root project @@ -395,76 +419,110 @@ def fetch_transactions_with_total_volumes( max_transactions: maximum number of transactions to return """ - if max_transactions == 0: - # no transactions required from this end (probably we only need transactions from the other end) - return None + def __init__( + self, + orgs: List[int], + large_transactions: bool, + max_transactions: int, + ): + self.log_state: Optional[DynamicSamplingLogState] = None + + self.large_transactions = large_transactions + self.max_transactions = max_transactions + self.org_ids = orgs + self.offset = 0 + transaction_string_id = indexer.resolve_shared_org("transaction") + self.transaction_tag = f"tags_raw[{transaction_string_id}]" + self.metric_id = indexer.resolve_shared_org( + str(TransactionMRI.COUNT_PER_ROOT_PROJECT.value) + ) + self.has_more_results = True + self.cache: List[ProjectTransactions] = [] - start_time = time.time() - offset = 0 - org_ids = list(org_ids) # just to be sure it is not some other sequence - transaction_string_id = indexer.resolve_shared_org("transaction") - transaction_tag = f"tags_raw[{transaction_string_id}]" - metric_id = indexer.resolve_shared_org(str(TransactionMRI.COUNT_PER_ROOT_PROJECT.value)) - current_org_id: Optional[int] = None - current_proj_id: Optional[int] = None - transaction_counts: List[Tuple[str, float]] = [] - - if large_transactions: - transaction_ordering = Direction.DESC - else: - transaction_ordering = Direction.ASC - - while (time.time() - start_time) < MAX_SECONDS: - query = ( - Query( - match=Entity(EntityKey.GenericOrgMetricsCounters.value), - select=[ - Function("sum", [Column("value")], "num_transactions"), - Column("org_id"), - Column("project_id"), - AliasedExpression(Column(transaction_tag), "transaction_name"), - ], - groupby=[ - Column("org_id"), - 
Column("project_id"), - AliasedExpression(Column(transaction_tag), "transaction_name"), - ], - where=[ - Condition( - Column("timestamp"), - Op.GTE, - datetime.utcnow() - BOOST_LOW_VOLUME_TRANSACTIONS_QUERY_INTERVAL, - ), - Condition(Column("timestamp"), Op.LT, datetime.utcnow()), - Condition(Column("metric_id"), Op.EQ, metric_id), - Condition(Column("org_id"), Op.IN, org_ids), - ], - granularity=Granularity(3600), - orderby=[ - OrderBy(Column("org_id"), Direction.ASC), - OrderBy(Column("project_id"), Direction.ASC), - OrderBy(Column("num_transactions"), transaction_ordering), - ], + if self.large_transactions: + self.transaction_ordering = Direction.DESC + else: + self.transaction_ordering = Direction.ASC + + def __iter__(self): + return self + + def __next__(self): + + if self.max_transactions == 0: + # the user is not interested in transactions of this type, return nothing. + raise StopIteration() + + if not self._cache_empty(): + # data in cache no need to go to the db + return self._get_from_cache() + + if self.has_more_results: + # still data in the db, load cache + query = ( + Query( + match=Entity(EntityKey.GenericOrgMetricsCounters.value), + select=[ + Function("sum", [Column("value")], "num_transactions"), + Column("org_id"), + Column("project_id"), + AliasedExpression(Column(self.transaction_tag), "transaction_name"), + ], + groupby=[ + Column("org_id"), + Column("project_id"), + AliasedExpression(Column(self.transaction_tag), "transaction_name"), + ], + where=[ + Condition( + Column("timestamp"), + Op.GTE, + datetime.utcnow() - BOOST_LOW_VOLUME_TRANSACTIONS_QUERY_INTERVAL, + ), + Condition(Column("timestamp"), Op.LT, datetime.utcnow()), + Condition(Column("metric_id"), Op.EQ, self.metric_id), + Condition(Column("org_id"), Op.IN, self.org_ids), + ], + granularity=Granularity(3600), + orderby=[ + OrderBy(Column("org_id"), Direction.ASC), + OrderBy(Column("project_id"), Direction.ASC), + OrderBy(Column("num_transactions"), self.transaction_ordering), + ], 
+ ) + .set_limitby( + LimitBy( + columns=[Column("org_id"), Column("project_id")], + count=self.max_transactions, + ) + ) + .set_limit(CHUNK_SIZE + 1) + .set_offset(self.offset) ) - .set_limitby( - LimitBy(columns=[Column("org_id"), Column("project_id")], count=max_transactions) + request = Request( + dataset=Dataset.PerformanceMetrics.value, app_id="dynamic_sampling", query=query ) - .set_limit(CHUNK_SIZE + 1) - .set_offset(offset) - ) - request = Request( - dataset=Dataset.PerformanceMetrics.value, app_id="dynamic_sampling", query=query - ) - data = raw_snql_query( - request, - referrer=Referrer.DYNAMIC_SAMPLING_COUNTERS_FETCH_PROJECTS_WITH_COUNT_PER_TRANSACTION.value, - )["data"] - count = len(data) - more_results = count > CHUNK_SIZE - offset += CHUNK_SIZE - - if more_results: - data = data[:-1] + data = raw_snql_query( + request, + referrer=Referrer.DYNAMIC_SAMPLING_COUNTERS_FETCH_PROJECTS_WITH_COUNT_PER_TRANSACTION.value, + )["data"] + count = len(data) + self.has_more_results = count > CHUNK_SIZE + self.offset += CHUNK_SIZE + + if self.has_more_results: + data = data[:-1] + + self._add_results_to_cache(data) + + # return from cache if empty stops iteration + return self._get_from_cache() + + def _add_results_to_cache(self, data): + transaction_counts: List[Tuple[str, float]] = [] + current_org_id: Optional[int] = None + current_proj_id: Optional[int] = None + for row in data: proj_id = row["project_id"] org_id = row["org_id"] @@ -472,43 +530,73 @@ def fetch_transactions_with_total_volumes( num_transactions = row["num_transactions"] if current_proj_id != proj_id or current_org_id != org_id: if ( - len(transaction_counts) > 0 + transaction_counts and current_proj_id is not None and current_org_id is not None ): - yield { - "project_id": current_proj_id, - "org_id": current_org_id, - "transaction_counts": transaction_counts, - "total_num_transactions": None, - "total_num_classes": None, - } + self.cache.append( + { + "project_id": current_proj_id, + "org_id": 
current_org_id, + "transaction_counts": transaction_counts, + "total_num_transactions": None, + "total_num_classes": None, + } + ) transaction_counts = [] current_org_id = org_id current_proj_id = proj_id transaction_counts.append((transaction_name, num_transactions)) - if not more_results: - if ( - len(transaction_counts) > 0 - and current_proj_id is not None - and current_org_id is not None - ): - yield { + + # collect the last project data + if transaction_counts: + # since we accumulated some transactions we must have set the org and proj + assert current_proj_id is not None + assert current_org_id is not None + self.cache.append( + { "project_id": current_proj_id, "org_id": current_org_id, "transaction_counts": transaction_counts, "total_num_transactions": None, "total_num_classes": None, } - break - else: - log_query_timeout( - query="fetch_transactions_with_total_volumes", - offset=offset, - timeout_seconds=MAX_SECONDS, - ) + ) + + def _cache_empty(self): + return not self.cache + + def _get_from_cache(self) -> ProjectTransactions: + if self._cache_empty(): + raise StopIteration() - return None + return self.cache.pop(0) + + def _ensure_log_state(self): + if self.log_state is None: + self.log_state = DynamicSamplingLogState() + + def get_current_state(self): + """ + Returns the current state of the iterator (how many orgs and projects it has iterated over) + + part of the ContexIterator protocol + + """ + self._ensure_log_state() + + return self.log_state + + def set_current_state(self, log_state: DynamicSamplingLogState) -> None: + """ + Set the log state from outside (typically immediately after creation) + + part of the ContextIterator protocol + + This is typically used when multiple iterators are concatenated into one logical operation + in order to accumulate results into one state. 
+ """ + self.log_state = log_state def merge_transactions( @@ -535,17 +623,19 @@ def merge_transactions( (totals["org_id"], totals["project_id"]), ) + assert left is not None + if right is None: merged_transactions = left["transaction_counts"] else: # we have both left and right we need to merge - transactions = set() + names = set() merged_transactions = [*left["transaction_counts"]] for transaction_name, _ in merged_transactions: - transactions.add(transaction_name) + names.add(transaction_name) for transaction_name, count in right["transaction_counts"]: - if transaction_name not in transactions: + if transaction_name not in names: # not already in left, add it merged_transactions.append((transaction_name, count)) @@ -553,8 +643,10 @@ def merge_transactions( "org_id": left["org_id"], "project_id": left["project_id"], "transaction_counts": merged_transactions, - "total_num_transactions": totals["total_num_transactions"] if totals is not None else None, - "total_num_classes": totals["total_num_classes"] if totals is not None else None, + "total_num_transactions": totals.get("total_num_transactions") + if totals is not None + else None, + "total_num_classes": totals.get("total_num_classes") if totals is not None else None, } diff --git a/src/sentry/dynamic_sampling/tasks/collect_orgs.py b/src/sentry/dynamic_sampling/tasks/collect_orgs.py index e7a875b50cab82..804e5d2e07bd30 100644 --- a/src/sentry/dynamic_sampling/tasks/collect_orgs.py +++ b/src/sentry/dynamic_sampling/tasks/collect_orgs.py @@ -19,17 +19,16 @@ ) @dynamic_sampling_task def collect_orgs() -> None: - enabled = options.get("dynamic_sampling.tasks.collect_orgs") or False if not enabled: return - context = TaskContext("sentry.dynamic_sampling.tasks.collect_orgs", MAX_SECONDS) + context = TaskContext("sentry.dynamic-sampling.tasks.collect_orgs", MAX_SECONDS) iterator_name = GetActiveOrgs.__name__ try: for orgs in TimedIterator( - context, iterator_name, GetActiveOrgs(max_projects=MAX_PROJECTS_PER_QUERY) + 
context, GetActiveOrgs(max_projects=MAX_PROJECTS_PER_QUERY), iterator_name ): pass except TimeoutException: diff --git a/src/sentry/dynamic_sampling/tasks/common.py b/src/sentry/dynamic_sampling/tasks/common.py index 6bbd8b79314ad3..6ecec5b44f067f 100644 --- a/src/sentry/dynamic_sampling/tasks/common.py +++ b/src/sentry/dynamic_sampling/tasks/common.py @@ -66,6 +66,16 @@ def get_current_state(self) -> DynamicSamplingLogState: """ ... + def set_current_state(self, state: DynamicSamplingLogState) -> None: + """ + Sets the current iterator state. + + If multiple iterators are used in a logical operation the state can be set + at the beginning of the iteration so the state can be passed from one iterator + to the next in order to measure the overall operation + """ + ... + class TimedIterator(Iterator[Any]): """ @@ -74,12 +84,29 @@ class TimedIterator(Iterator[Any]): It updates the task context with the current state of the inner iterator at each step """ - def __init__(self, context: TaskContext, name: str, inner: ContextIterator): + def __init__( + self, + context: TaskContext, + inner: ContextIterator, + name: Optional[str] = None, + timer: Optional[Timer] = None, + ): self.context = context - self.iterator_execution_time = Timer() - self.name = name self.inner = inner + if name is None: + name = inner.__class__.__name__ + self.name = name + + if timer is None: + self.iterator_execution_time = Timer() + else: + self.iterator_execution_time = timer + + # in case the iterator is part of a logical state spanning multiple instantiations + # pick up where you last left of + inner.set_current_state(context.get_function_state(name)) + def __iter__(self): return self @@ -93,6 +120,18 @@ def __next__(self): self.context.set_function_state(self.name, state) return val + def get_current_state(self) -> DynamicSamplingLogState: + """ + Make the TimedIterator a ContextIterator by forwarding to the inner iterator + """ + return self.inner.get_current_state() + + def 
set_current_state(self, state: DynamicSamplingLogState) -> None: + """ + Make the TimedIterator a ContextIterator by forwarding to the inner iterator + """ + self.inner.set_current_state(state) + class GetActiveOrgs: """ @@ -184,6 +223,9 @@ def get_current_state(self): """ return self.log_state + def set_current_state(self, log_state: DynamicSamplingLogState): + self.log_state = log_state + def _enough_results_cached(self): """ Return true if we have enough data to return a full batch in the cache (i.e. last_result) @@ -213,7 +255,7 @@ def _get_from_cache(self): for idx, (org_id, num_projects) in enumerate(self.last_result): count_projects += num_projects self.log_state.num_orgs += 1 - self.log_state.num_orgs += num_projects + self.log_state.num_projects += num_projects if idx >= (self.max_orgs - 1) or ( self.max_projects is not None and count_projects >= self.max_projects ): diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 8dd2e3802e219b..df0bfcd8e6c4b7 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -1392,4 +1392,4 @@ register("crons.organization.disable-check-in", type=Sequence, default=[]) # Turns on and off the running for dynamic sampling collect_orgs. 
-register("dynamic_sampling.tasks.collect_orgs", default=False, flags=FLAG_MODIFIABLE_BOOL) +register("dynamic-sampling.tasks.collect_orgs", default=False, flags=FLAG_MODIFIABLE_BOOL) diff --git a/tests/sentry/dynamic_sampling/tasks/test_boost_low_volume_transactions.py b/tests/sentry/dynamic_sampling/tasks/test_boost_low_volume_transactions.py index 149e1133372650..ae4ac0ace5c401 100644 --- a/tests/sentry/dynamic_sampling/tasks/test_boost_low_volume_transactions.py +++ b/tests/sentry/dynamic_sampling/tasks/test_boost_low_volume_transactions.py @@ -5,18 +5,18 @@ from freezegun import freeze_time from sentry.dynamic_sampling.tasks.boost_low_volume_transactions import ( + FetchProjectTransactionTotals, + FetchProjectTransactionVolumes, ProjectIdentity, ProjectTransactions, ProjectTransactionsTotals, - fetch_project_transaction_totals, - fetch_transactions_with_total_volumes, - get_orgs_with_project_counts, is_project_identity_before, is_same_project, merge_transactions, next_totals, transactions_zip, ) +from sentry.dynamic_sampling.tasks.common import GetActiveOrgs from sentry.dynamic_sampling.tasks.recalibrate_orgs import fetch_org_volumes, get_active_orgs from sentry.snuba.metrics.naming_layer.mri import TransactionMRI from sentry.testutils import BaseMetricsLayerTestCase, SnubaTestCase, TestCase @@ -79,14 +79,14 @@ def get_total_counts_for_project(self, idx: int): return 1 + 100 + 1000 + 2000 + 3000 + idx * 5, 5 def test_get_orgs_with_transactions_respects_max_orgs(self): - actual = list(get_orgs_with_project_counts(2, 20)) + actual = list(GetActiveOrgs(2, 20)) orgs = self.org_ids # we should return groups of 2 orgs at a time assert actual == [[orgs[0], orgs[1]], [orgs[2]]] def test_get_orgs_with_transactions_respects_max_projs(self): - actual = list(get_orgs_with_project_counts(10, 5)) + actual = list(GetActiveOrgs(10, 5)) orgs = [org["org_id"] for org in self.orgs_info] # since each org has 3 projects and we have a limit of 5 proj @@ -103,7 +103,7 @@ def 
test_fetch_transactions_with_total_volumes_large(self): orgs = self.org_ids expected_names = {"tm3", "tl5", "tl4"} - for idx, p_tran in enumerate(fetch_transactions_with_total_volumes(orgs, True, 3)): + for idx, p_tran in enumerate(FetchProjectTransactionVolumes(orgs, True, 3)): if p_tran is not None: assert len(p_tran["transaction_counts"]) == 3 for name, count in p_tran["transaction_counts"]: @@ -120,7 +120,7 @@ def test_fetch_transactions_with_total_volumes_small(self): orgs = self.org_ids expected_names = {"ts1", "ts2"} - for idx, p_tran in enumerate(fetch_transactions_with_total_volumes(orgs, False, 2)): + for idx, p_tran in enumerate(FetchProjectTransactionVolumes(orgs, False, 2)): assert len(p_tran["transaction_counts"]) == 2 if p_tran is not None: for name, count in p_tran["transaction_counts"]: @@ -136,7 +136,7 @@ def test_fetch_transactions_with_total_volumes(self): orgs = self.org_ids - for idx, totals in enumerate(fetch_project_transaction_totals(orgs)): + for idx, totals in enumerate(FetchProjectTransactionTotals(orgs)): total_counts, num_classes = self.get_total_counts_for_project(idx) assert totals["total_num_transactions"] == total_counts assert totals["total_num_classes"] == num_classes diff --git a/tests/sentry/dynamic_sampling/tasks/test_common.py b/tests/sentry/dynamic_sampling/tasks/test_common.py index d6ea1a20ebfa0d..b6c6ef428375b5 100644 --- a/tests/sentry/dynamic_sampling/tasks/test_common.py +++ b/tests/sentry/dynamic_sampling/tasks/test_common.py @@ -46,12 +46,15 @@ def __next__(self): def get_current_state(self): return DynamicSamplingLogState(num_iterations=self.count) + def set_current_state(self, state: DynamicSamplingLogState): + self.count = state.num_iterations + def test_timed_iterator_no_timout(): with freeze_time("2023-07-12 10:00:00") as frozen_time: context = TaskContext("my_context", 3) - it = TimedIterator(context, "ti1", FakeContextIterator(frozen_time, 1)) + it = TimedIterator(context, FakeContextIterator(frozen_time, 1), 
"ti1") # should iterate while there is no timeout assert (next(it)) == 1 assert context.get_function_state("ti1") == DynamicSamplingLogState( @@ -78,7 +81,7 @@ def test_timed_iterator_no_timout(): def test_timed_iterator_with_timeout(): with freeze_time("2023-07-12 10:00:00") as frozen_time: context = TaskContext("my_context", 3) - it = TimedIterator(context, "ti1", FakeContextIterator(frozen_time, 4)) + it = TimedIterator(context, FakeContextIterator(frozen_time, 4), "ti1") # should iterate while there is no timeout assert (next(it)) == 1 assert context.get_function_state("ti1") == DynamicSamplingLogState(