From fb559e5a7b6c1bba7ce4377970ea54c62e14b258 Mon Sep 17 00:00:00 2001 From: Athena Moghaddam Date: Fri, 28 Jun 2024 12:48:42 +0300 Subject: [PATCH 1/7] feat(apis): Pass query source to snuba - phase 2 --- src/sentry/snuba/metrics_performance.py | 6 ++---- src/sentry/snuba/transactions.py | 10 ++++++++++ 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/sentry/snuba/metrics_performance.py b/src/sentry/snuba/metrics_performance.py index 7ea9308ab69bfc..78358a1078a5d8 100644 --- a/src/sentry/snuba/metrics_performance.py +++ b/src/sentry/snuba/metrics_performance.py @@ -109,8 +109,7 @@ def bulk_timeseries_query( groupby: Column | None = None, *, apply_formatting: Literal[False], -) -> EventsResponse: - ... +) -> EventsResponse: ... @overload @@ -130,8 +129,7 @@ def bulk_timeseries_query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, groupby: Column | None = None, -) -> SnubaTSResult: - ... +) -> SnubaTSResult: ... def bulk_timeseries_query( diff --git a/src/sentry/snuba/transactions.py b/src/sentry/snuba/transactions.py index 7287c6133b6f8a..ed41e564d9cae9 100644 --- a/src/sentry/snuba/transactions.py +++ b/src/sentry/snuba/transactions.py @@ -8,7 +8,10 @@ from sentry.search.events.types import EventsResponse, ParamsType, SnubaParams from sentry.snuba import discover from sentry.snuba.dataset import Dataset +<<<<<<< HEAD from sentry.snuba.metrics.extraction import MetricSpecType +======= +>>>>>>> d3154bf626f (feat(apis): Pass query source to snuba - phase 2) from sentry.snuba.query_sources import QuerySource from sentry.utils.snuba import SnubaTSResult @@ -132,10 +135,17 @@ def top_events_timeseries( include_other: bool = False, functions_acl: list[str] | None = None, on_demand_metrics_enabled: bool = False, +<<<<<<< HEAD on_demand_metrics_type: MetricSpecType | None = None, query_source: QuerySource | None = None, ) -> dict[str, SnubaTSResult] | SnubaTSResult: return discover.top_events_timeseries( +======= + on_demand_metrics_type=None, + query_source: QuerySource | None = None, +): + return discover._top_events_timeseries( +>>>>>>> d3154bf626f (feat(apis): Pass query source to snuba - phase 2) timeseries_columns, selected_columns, user_query, From 06fee27fd1beb27c4902763fde73e879cf3dc293 Mon Sep 17 00:00:00 2001 From: Athena Moghaddam Date: Fri, 28 Jun 2024 13:35:48 +0300 Subject: [PATCH 2/7] feat(apis): Pass query source to snuba - phase 3 --- .../api/endpoints/organization_events.py | 32 +++--- .../endpoints/organization_events_trace.py | 97 ++++++++++++++++--- .../organization_events_trends_v2.py | 5 + src/sentry/snuba/metrics_performance.py | 7 +- src/sentry/snuba/transactions.py | 12 +-- 5 files changed, 113 insertions(+), 40 deletions(-) diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index a1a4097cf0ffbb..398ce8b40178a3 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -21,6 +21,7 @@ from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.discover.models import DiscoverSavedQuery, DiscoverSavedQueryTypes from sentry.exceptions import InvalidParams +from sentry.middleware import is_frontend_request from sentry.models.dashboard_widget import DashboardWidget, DashboardWidgetTypes from sentry.models.organization import Organization from sentry.snuba import ( @@ -31,6 +32,7 @@ transactions, ) from sentry.snuba.metrics.extraction import MetricSpecType +from 
sentry.snuba.query_sources import QuerySource from sentry.snuba.referrer import Referrer from sentry.snuba.utils import dataset_split_decision_inferred_from_query, get_dataset from sentry.types.ratelimit import RateLimit, RateLimitCategory @@ -389,6 +391,7 @@ def get(self, request: Request, organization) -> Response: referrer = Referrer.API_ORGANIZATION_EVENTS.value def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]: + query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API return scoped_dataset.query( selected_columns=self.get_field_list(organization, request), query=query, @@ -409,6 +412,7 @@ def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]: use_metrics_layer=batch_features.get("organizations:use-metrics-layer", False), on_demand_metrics_enabled=on_demand_metrics_enabled, on_demand_metrics_type=on_demand_metrics_type, + query_source=query_source, ) @sentry_sdk.tracing.trace @@ -467,14 +471,14 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w if decision == DashboardWidgetTypes.DISCOVER: return _data_fn(discover, offset, limit, scoped_query) elif decision == DashboardWidgetTypes.TRANSACTION_LIKE: - original_results["meta"][ - "discoverSplitDecision" - ] = DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.TRANSACTION_LIKE) + original_results["meta"]["discoverSplitDecision"] = ( + DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.TRANSACTION_LIKE) + ) return original_results elif decision == DashboardWidgetTypes.ERROR_EVENTS and error_results: - error_results["meta"][ - "discoverSplitDecision" - ] = DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS) + error_results["meta"]["discoverSplitDecision"] = ( + DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS) + ) return error_results else: return original_results @@ -547,10 +551,10 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save try: error_results = map["errors"] - error_results["meta"][ - "discoverSplitDecision" - ] = DiscoverSavedQueryTypes.get_type_name( - DiscoverSavedQueryTypes.ERROR_EVENTS + error_results["meta"]["discoverSplitDecision"] = ( + DiscoverSavedQueryTypes.get_type_name( + DiscoverSavedQueryTypes.ERROR_EVENTS + ) ) has_errors = len(error_results["data"]) > 0 except KeyError: @@ -558,10 +562,10 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save try: transaction_results = map["transactions"] - transaction_results["meta"][ - "discoverSplitDecision" - ] = DiscoverSavedQueryTypes.get_type_name( - DiscoverSavedQueryTypes.TRANSACTION_LIKE + transaction_results["meta"]["discoverSplitDecision"] = ( + DiscoverSavedQueryTypes.get_type_name( + DiscoverSavedQueryTypes.TRANSACTION_LIKE + ) ) has_transactions = len(transaction_results["data"]) > 0 except KeyError: diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py index cc42d5dca0bb06..ca98ce637cff47 100644 --- a/src/sentry/api/endpoints/organization_events_trace.py +++ b/src/sentry/api/endpoints/organization_events_trace.py @@ -24,6 +24,7 @@ from sentry.api.utils import handle_query_errors, update_snuba_params_with_timestamp from sentry.eventstore.models import Event, GroupEvent from sentry.issues.issue_occurrence import IssueOccurrence +from sentry.middleware import is_frontend_request from sentry.models.group import Group from sentry.models.organization import Organization from sentry.models.project 
import Project @@ -32,6 +33,7 @@ from sentry.search.events.builder.spans_indexed import SpansIndexedQueryBuilder from sentry.search.events.types import QueryBuilderConfig, SnubaParams from sentry.snuba.dataset import Dataset +from sentry.snuba.query_sources import QuerySource from sentry.snuba.referrer import Referrer from sentry.utils.iterators import chunked from sentry.utils.numbers import base32_encode, format_grouped_length @@ -192,11 +194,13 @@ def __init__( light: bool = False, snuba_params: SnubaParams | None = None, span_serialized: bool = False, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, ) -> None: self.event: SnubaTransaction = event self.errors: list[TraceError] = [] self.children: list[TraceEvent] = [] self.performance_issues: list[TracePerformanceIssue] = [] + self.query_source = query_source # Can be None on the light trace when we don't know the parent self.parent_event_id: str | None = parent @@ -306,7 +310,8 @@ def load_performance_issues(self, light: bool, snuba_params: SnubaParams | None) ) occurrence_ids = occurrence_query.process_results( occurrence_query.run_query( - referrer=Referrer.API_TRACE_VIEW_GET_OCCURRENCE_IDS.value + referrer=Referrer.API_TRACE_VIEW_GET_OCCURRENCE_IDS.value, + query_source=self.query_source, ) )["data"] @@ -499,7 +504,11 @@ def child_sort_key(item: TraceEvent) -> list[int | str]: ] -def count_performance_issues(trace_id: str, params: SnubaParams) -> int: +def count_performance_issues( + trace_id: str, + params: SnubaParams, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, +) -> int: transaction_query = DiscoverQueryBuilder( Dataset.IssuePlatform, params={}, @@ -510,7 +519,8 @@ def count_performance_issues(trace_id: str, params: SnubaParams) -> int: ) transaction_query.columns.append(Function("count()", alias="total_groups")) count = transaction_query.run_query( - referrer=Referrer.API_TRACE_VIEW_COUNT_PERFORMANCE_ISSUES.value + referrer=Referrer.API_TRACE_VIEW_COUNT_PERFORMANCE_ISSUES.value, + query_source=query_source, ) return count["data"][0].get("total_groups", 0) @@ -519,6 +529,7 @@ def count_performance_issues(trace_id: str, params: SnubaParams) -> int: def create_transaction_params( trace_id: str, snuba_params: SnubaParams, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, ) -> SnubaParams: """Can't use the transaction params for errors since traces can be errors only""" query_metadata = options.get("performance.traces.query_timestamp_projects") @@ -537,7 +548,9 @@ def create_transaction_params( "project.id", ], ) - results = metadata_query.run_query(Referrer.API_TRACE_VIEW_GET_TIMESTAMP_PROJECTS.value) + results = metadata_query.run_query( + Referrer.API_TRACE_VIEW_GET_TIMESTAMP_PROJECTS.value, query_source=query_source + ) results = metadata_query.process_results(results) project_id_set = set() min_timestamp = None @@ -582,6 +595,7 @@ def query_trace_data( limit: int, event_id: str | None, use_spans: bool, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, ) -> tuple[Sequence[SnubaTransaction], Sequence[SnubaError]]: transaction_columns = [ "id", @@ -671,6 +685,7 @@ def query_trace_data( occurrence_query.get_snql_query(), ], referrer=Referrer.API_TRACE_VIEW_GET_EVENTS.value, + query_source=query_source, ) transformed_results = [ @@ -773,6 +788,7 @@ def augment_transactions_with_spans( errors: Sequence[SnubaError], trace_id: str, params: SnubaParams, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, ) -> Sequence[SnubaTransaction]: """Augment the list 
of transactions with parent, error and problem data""" with sentry_sdk.start_span(op="augment.transactions", description="setup"): @@ -881,6 +897,7 @@ def augment_transactions_with_spans( results = bulk_snuba_queries( [query.get_snql_query() for query in queries], referrer=Referrer.API_TRACE_VIEW_GET_PARENTS.value, + query_source=query_source, ) parents_results = results[0] for result, query in zip(results, queries): @@ -891,7 +908,8 @@ def augment_transactions_with_spans( else: parents_query = build_span_query(trace_id, spans_params, list(query_spans)) parents_results = parents_query.run_query( - referrer=Referrer.API_TRACE_VIEW_GET_PARENTS.value + referrer=Referrer.API_TRACE_VIEW_GET_PARENTS.value, + query_source=query_source, ) if len(parents_results) == parents_query.limit.limit: hit_limit = True @@ -1053,19 +1071,38 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht if event_id and not is_event_id(event_id): return Response({"detail": INVALID_ID_DETAILS.format("Event ID")}, status=400) + query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API with handle_query_errors(): - transaction_params = create_transaction_params(trace_id, snuba_params) + transaction_params = create_transaction_params( + trace_id, snuba_params, query_source=query_source + ) if use_spans: transactions, errors = query_trace_data( - trace_id, snuba_params, transaction_params, limit, event_id, use_spans + trace_id, + snuba_params, + transaction_params, + limit, + event_id, + use_spans, + query_source=query_source, ) transactions = augment_transactions_with_spans( - transactions, errors, trace_id, snuba_params + transactions, + errors, + trace_id, + snuba_params, + query_source=query_source, ) else: transactions, errors = query_trace_data( - trace_id, snuba_params, transaction_params, limit, None, False + trace_id, + snuba_params, + transaction_params, + limit, + None, + False, + query_source=query_source, ) self.record_analytics(transactions, trace_id, self.request.user.id, organization.id) @@ -1097,6 +1134,9 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht event_id, detailed, use_spans, + query_source=( + QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API + ), ) ) @@ -1183,6 +1223,7 @@ def serialize( event_id: str | None, detailed: bool = False, use_spans: bool = False, + query_source: QuerySource | None = None, ) -> dict[str, list[LightResponse | TraceError]]: """Because the light endpoint could potentially have gaps between root and event we return a flattened list""" if use_spans: @@ -1249,13 +1290,19 @@ def serialize( 0, True, snuba_params=snuba_params, + query_source=query_source, ) ) current_generation = 1 break current_event = TraceEvent( - snuba_event, root_id, current_generation, True, snuba_params=snuba_params + snuba_event, + root_id, + current_generation, + True, + snuba_params=snuba_params, + query_source=query_source, ) trace_results.append(current_event) @@ -1288,6 +1335,7 @@ def serialize( ), True, snuba_params=snuba_params, + query_source=query_source, ) for child_event in child_events ] @@ -1350,6 +1398,7 @@ def serialize( event_id: str | None, detailed: bool = False, use_spans: bool = False, + query_source: QuerySource | None = None, ) -> SerializedTrace: """For the full event trace, we return the results as a graph instead of a flattened list @@ -1365,6 +1414,7 @@ def serialize( warning_extra, event_id, detailed, + query_source=query_source, ) return results @@ -1384,7 
+1434,9 @@ def serialize( if roots: results_map[None] = [] for root in roots: - root_event = TraceEvent(root, None, 0, snuba_params=snuba_params) + root_event = TraceEvent( + root, None, 0, snuba_params=snuba_params, query_source=query_source + ) parent_events[root["id"]] = root_event results_map[None].append(root_event) to_check.append(root) @@ -1405,7 +1457,11 @@ def serialize( parent_map[parent_span_id] = siblings previous_event = parent_events[current_event["id"]] = TraceEvent( - current_event, None, 0, snuba_params=snuba_params + current_event, + None, + 0, + snuba_params=snuba_params, + query_source=query_source, ) # Used to avoid removing the orphan from results entirely if we loop @@ -1480,6 +1536,7 @@ def serialize( else None ), snuba_params=snuba_params, + query_source=query_source, ) # Add this event to its parent's children previous_event.children.append(parent_events[child_event["id"]]) @@ -1552,6 +1609,7 @@ def serialize_with_spans( event_id: str | None, detailed: bool = False, ) -> SerializedTrace: + query_source: QuerySource | None = (None,) root_traces: list[TraceEvent] = [] orphans: list[TraceEvent] = [] orphan_event_ids: set[str] = set() @@ -1565,7 +1623,11 @@ def serialize_with_spans( for transaction in transactions: parent_id = transaction["trace.parent_transaction"] serialized_transaction = TraceEvent( - transaction, parent_id, -1, span_serialized=True + transaction, + parent_id, + -1, + span_serialized=True, + query_source=query_source, ) if parent_id is None: if transaction["trace.parent_span"]: @@ -1642,6 +1704,7 @@ class OrganizationEventsTraceMetaEndpoint(OrganizationEventsV2EndpointBase): publish_status = { "GET": ApiPublishStatus.PRIVATE, } + snuba_methods = ["GET"] def get(self, request: Request, organization: Organization, trace_id: str) -> HttpResponse: if not self.has_feature(organization, request): @@ -1656,7 +1719,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht return Response(status=404) update_snuba_params_with_timestamp(request, snuba_params) - + query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API meta_query = DiscoverQueryBuilder( dataset=Dataset.Discover, selected_columns=[ @@ -1681,6 +1744,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht query=f"trace:{trace_id}", limit=10_000, ) + with handle_query_errors(): results = bulk_snuba_queries( [ @@ -1688,13 +1752,16 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht transaction_children_query.get_snql_query(), ], referrer=Referrer.API_TRACE_VIEW_GET_META.value, + query_source=query_source, ) meta_result, children_result = results[0], results[1] if len(meta_result["data"]) == 0: return Response(status=404) # Merge the result back into the first query meta_result["data"][0]["performance_issues"] = count_performance_issues( - trace_id, snuba_params + trace_id, + snuba_params, + query_source=query_source, ) return Response(self.serialize(meta_result["data"][0], children_result["data"])) diff --git a/src/sentry/api/endpoints/organization_events_trends_v2.py b/src/sentry/api/endpoints/organization_events_trends_v2.py index 54635e2abcead5..3cd21124fae157 100644 --- a/src/sentry/api/endpoints/organization_events_trends_v2.py +++ b/src/sentry/api/endpoints/organization_events_trends_v2.py @@ -13,11 +13,13 @@ from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase from sentry.api.paginator import GenericOffsetPaginator from sentry.api.utils import 
handle_query_errors +from sentry.middleware import is_frontend_request from sentry.search.events.constants import METRICS_GRANULARITIES from sentry.seer.breakpoints import detect_breakpoints from sentry.snuba import metrics_performance from sentry.snuba.discover import create_result_key, zerofill from sentry.snuba.metrics_performance import query as metrics_query +from sentry.snuba.query_sources import QuerySource from sentry.snuba.referrer import Referrer from sentry.types.ratelimit import RateLimit, RateLimitCategory from sentry.utils.iterators import chunked @@ -90,6 +92,7 @@ def get(self, request: Request, organization) -> Response: selected_columns = ["project_id", "transaction"] query = request.GET.get("query") + query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API def get_top_events(user_query, params, event_limit, referrer): top_event_columns = selected_columns[:] @@ -108,6 +111,7 @@ def get_top_events(user_query, params, event_limit, referrer): auto_aggregations=True, use_aggregate_conditions=True, granularity=DAY_GRANULARITY_IN_SECONDS, + query_source=query_source, ) def generate_top_transaction_query(events): @@ -153,6 +157,7 @@ def get_timeseries(top_events, _, rollup, zerofill_results): referrer=Referrer.API_TRENDS_GET_EVENT_STATS_V2_TIMESERIES.value, groupby=[Column("project_id"), Column("transaction")], apply_formatting=False, + query_source=query_source, ) # Parse results diff --git a/src/sentry/snuba/metrics_performance.py b/src/sentry/snuba/metrics_performance.py index 78358a1078a5d8..330c8ddef8c61a 100644 --- a/src/sentry/snuba/metrics_performance.py +++ b/src/sentry/snuba/metrics_performance.py @@ -109,6 +109,7 @@ def bulk_timeseries_query( groupby: Column | None = None, *, apply_formatting: Literal[False], + query_source: QuerySource | None = None, ) -> EventsResponse: ... @@ -129,6 +130,7 @@ def bulk_timeseries_query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, groupby: Column | None = None, + query_source: QuerySource | None = None, ) -> SnubaTSResult: ... @@ -150,6 +152,7 @@ def bulk_timeseries_query( groupby: Column | None = None, *, apply_formatting: bool = True, + query_source: QuerySource | None = None, ) -> SnubaTSResult | EventsResponse: """ High-level API for doing *bulk* arbitrary user timeseries queries against events. 
@@ -184,7 +187,9 @@ def bulk_timeseries_query( metrics_queries.append(snql_query[0]) metrics_referrer = referrer + ".metrics-enhanced" - bulk_result = bulk_snuba_queries(metrics_queries, metrics_referrer) + bulk_result = bulk_snuba_queries( + metrics_queries, metrics_referrer, query_source=query_source + ) _result: dict[str, Any] = {"data": []} for br in bulk_result: _result["data"] = [*_result["data"], *br["data"]] diff --git a/src/sentry/snuba/transactions.py b/src/sentry/snuba/transactions.py index ed41e564d9cae9..3b5a10f7083082 100644 --- a/src/sentry/snuba/transactions.py +++ b/src/sentry/snuba/transactions.py @@ -8,10 +8,7 @@ from sentry.search.events.types import EventsResponse, ParamsType, SnubaParams from sentry.snuba import discover from sentry.snuba.dataset import Dataset -<<<<<<< HEAD from sentry.snuba.metrics.extraction import MetricSpecType -======= ->>>>>>> d3154bf626f (feat(apis): Pass query source to snuba - phase 2) from sentry.snuba.query_sources import QuerySource from sentry.utils.snuba import SnubaTSResult @@ -45,6 +42,7 @@ def query( on_demand_metrics_type: MetricSpecType | None = None, dataset: Dataset = Dataset.Discover, fallback_to_transactions: bool = False, + query_source: QuerySource | None = None, ) -> EventsResponse: return discover.query( selected_columns, @@ -73,6 +71,7 @@ def query( on_demand_metrics_type=on_demand_metrics_type, dataset=Dataset.Transactions, fallback_to_transactions=fallback_to_transactions, + query_source=query_source, ) @@ -135,17 +134,10 @@ def top_events_timeseries( include_other: bool = False, functions_acl: list[str] | None = None, on_demand_metrics_enabled: bool = False, -<<<<<<< HEAD on_demand_metrics_type: MetricSpecType | None = None, query_source: QuerySource | None = None, ) -> dict[str, SnubaTSResult] | SnubaTSResult: return discover.top_events_timeseries( -======= - on_demand_metrics_type=None, - query_source: QuerySource | None = None, -): - return discover._top_events_timeseries( ->>>>>>> d3154bf626f (feat(apis): Pass query source to snuba - phase 2) timeseries_columns, selected_columns, user_query, From aaa0cdfc39239f02e6a8526aa1b9cba068a4d0fb Mon Sep 17 00:00:00 2001 From: Athena Moghaddam Date: Tue, 6 Aug 2024 09:46:12 -0700 Subject: [PATCH 3/7] feedback + merge conflicts --- src/sentry/api/base.py | 19 +++++++++-- .../api/endpoints/organization_events.py | 32 +++++++++---------- .../endpoints/organization_events_stats.py | 3 +- .../endpoints/organization_events_trace.py | 4 +-- .../organization_events_trends_v2.py | 4 +-- 5 files changed, 35 insertions(+), 27 deletions(-) diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index 3358b50d766704..6f8c38c1b53587 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -31,10 +31,12 @@ from sentry.apidocs.hooks import HTTP_METHOD_NAME from sentry.auth import access from sentry.auth.staff import has_staff_option +from sentry.middleware import is_frontend_request from sentry.models.environment import Environment from sentry.organizations.absolute_url import generate_organization_url from sentry.ratelimits.config import DEFAULT_RATE_LIMIT_CONFIG, RateLimitConfig from sentry.silo.base import SiloLimit, SiloMode +from sentry.snuba.query_sources import QuerySource from sentry.types.ratelimit import RateLimit, RateLimitCategory from sentry.utils.audit import create_audit_entry from sentry.utils.cursors import Cursor @@ -221,9 +223,11 @@ class Endpoint(APIView): owner: ApiOwner = ApiOwner.UNOWNED publish_status: dict[HTTP_METHOD_NAME, ApiPublishStatus] = {} - 
rate_limits: RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]] | Callable[ - ..., RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]] - ] = DEFAULT_RATE_LIMIT_CONFIG + rate_limits: ( + RateLimitConfig + | dict[str, dict[RateLimitCategory, RateLimit]] + | Callable[..., RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]]] + ) = DEFAULT_RATE_LIMIT_CONFIG enforce_rate_limit: bool = settings.SENTRY_RATELIMITER_ENABLED snuba_methods: list[HTTP_METHOD_NAME] = [] @@ -582,6 +586,15 @@ def paginate( self.add_cursor_headers(request, response, cursor_result) return response + def get_request_source(request: Request) -> QuerySource: + """ + This is an estimate of query source. Treat it more like a good guess and + don't write logic that depends on it. Used for monitoring only atm. + """ + if is_frontend_request(request): + return QuerySource.FRONTEND + return QuerySource.API + class EnvironmentMixin: def _get_environment_func(self, request: Request, organization_id): diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 398ce8b40178a3..6ff4259bdda2e1 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -21,7 +21,6 @@ from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.discover.models import DiscoverSavedQuery, DiscoverSavedQueryTypes from sentry.exceptions import InvalidParams -from sentry.middleware import is_frontend_request from sentry.models.dashboard_widget import DashboardWidget, DashboardWidgetTypes from sentry.models.organization import Organization from sentry.snuba import ( @@ -32,7 +31,6 @@ transactions, ) from sentry.snuba.metrics.extraction import MetricSpecType -from sentry.snuba.query_sources import QuerySource from sentry.snuba.referrer import Referrer from sentry.snuba.utils import dataset_split_decision_inferred_from_query, get_dataset from sentry.types.ratelimit import RateLimit, RateLimitCategory @@ -391,7 +389,7 @@ def get(self, request: Request, organization) -> Response: referrer = Referrer.API_ORGANIZATION_EVENTS.value def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]: - query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API + query_source = self.get_request_source(request) return scoped_dataset.query( selected_columns=self.get_field_list(organization, request), query=query, @@ -471,14 +469,14 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w if decision == DashboardWidgetTypes.DISCOVER: return _data_fn(discover, offset, limit, scoped_query) elif decision == DashboardWidgetTypes.TRANSACTION_LIKE: - original_results["meta"]["discoverSplitDecision"] = ( - DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.TRANSACTION_LIKE) - ) + original_results["meta"][ + "discoverSplitDecision" + ] = DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.TRANSACTION_LIKE) return original_results elif decision == DashboardWidgetTypes.ERROR_EVENTS and error_results: - error_results["meta"]["discoverSplitDecision"] = ( - DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS) - ) + error_results["meta"][ + "discoverSplitDecision" + ] = DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS) return error_results else: return original_results @@ -551,10 +549,10 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save try: error_results = map["errors"] - 
error_results["meta"]["discoverSplitDecision"] = ( - DiscoverSavedQueryTypes.get_type_name( - DiscoverSavedQueryTypes.ERROR_EVENTS - ) + error_results["meta"][ + "discoverSplitDecision" + ] = DiscoverSavedQueryTypes.get_type_name( + DiscoverSavedQueryTypes.ERROR_EVENTS ) has_errors = len(error_results["data"]) > 0 except KeyError: @@ -562,10 +560,10 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save try: transaction_results = map["transactions"] - transaction_results["meta"]["discoverSplitDecision"] = ( - DiscoverSavedQueryTypes.get_type_name( - DiscoverSavedQueryTypes.TRANSACTION_LIKE - ) + transaction_results["meta"][ + "discoverSplitDecision" + ] = DiscoverSavedQueryTypes.get_type_name( + DiscoverSavedQueryTypes.TRANSACTION_LIKE ) has_transactions = len(transaction_results["data"]) > 0 except KeyError: diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 45cc93fd942ef7..751b67e879a882 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -12,7 +12,6 @@ from sentry.api.base import region_silo_endpoint from sentry.api.bases import OrganizationEventsV2EndpointBase from sentry.constants import MAX_TOP_EVENTS -from sentry.middleware import is_frontend_request from sentry.models.dashboard_widget import DashboardWidget, DashboardWidgetTypes from sentry.models.organization import Organization from sentry.snuba import ( @@ -181,7 +180,7 @@ def check_if_results_have_data(self, results: SnubaTSResult | dict[str, SnubaTSR return has_data def get(self, request: Request, organization: Organization) -> Response: - query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API + query_source = self.get_request_source(request) with sentry_sdk.start_span(op="discover.endpoint", description="filter_params") as span: span.set_data("organization", organization) diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py index ca98ce637cff47..c3f3877c03779c 100644 --- a/src/sentry/api/endpoints/organization_events_trace.py +++ b/src/sentry/api/endpoints/organization_events_trace.py @@ -1071,7 +1071,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht if event_id and not is_event_id(event_id): return Response({"detail": INVALID_ID_DETAILS.format("Event ID")}, status=400) - query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API + query_source = self.get_request_source(request) with handle_query_errors(): transaction_params = create_transaction_params( trace_id, snuba_params, query_source=query_source @@ -1719,7 +1719,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht return Response(status=404) update_snuba_params_with_timestamp(request, snuba_params) - query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API + query_source = self.get_request_source(request) meta_query = DiscoverQueryBuilder( dataset=Dataset.Discover, selected_columns=[ diff --git a/src/sentry/api/endpoints/organization_events_trends_v2.py b/src/sentry/api/endpoints/organization_events_trends_v2.py index 3cd21124fae157..1fa9a2de02e593 100644 --- a/src/sentry/api/endpoints/organization_events_trends_v2.py +++ b/src/sentry/api/endpoints/organization_events_trends_v2.py @@ -13,13 +13,11 @@ from sentry.api.bases import NoProjects, 
OrganizationEventsV2EndpointBase from sentry.api.paginator import GenericOffsetPaginator from sentry.api.utils import handle_query_errors -from sentry.middleware import is_frontend_request from sentry.search.events.constants import METRICS_GRANULARITIES from sentry.seer.breakpoints import detect_breakpoints from sentry.snuba import metrics_performance from sentry.snuba.discover import create_result_key, zerofill from sentry.snuba.metrics_performance import query as metrics_query -from sentry.snuba.query_sources import QuerySource from sentry.snuba.referrer import Referrer from sentry.types.ratelimit import RateLimit, RateLimitCategory from sentry.utils.iterators import chunked @@ -92,7 +90,7 @@ def get(self, request: Request, organization) -> Response: selected_columns = ["project_id", "transaction"] query = request.GET.get("query") - query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API + query_source = self.get_request_source(request) def get_top_events(user_query, params, event_limit, referrer): top_event_columns = selected_columns[:] From 1e9a12e0a59c9dc8f1b1d508b781014c78a5f0cf Mon Sep 17 00:00:00 2001 From: Athena Moghaddam Date: Tue, 6 Aug 2024 09:53:14 -0700 Subject: [PATCH 4/7] more merge oops --- src/sentry/api/endpoints/organization_events_trace.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py index c3f3877c03779c..89662dd97805d3 100644 --- a/src/sentry/api/endpoints/organization_events_trace.py +++ b/src/sentry/api/endpoints/organization_events_trace.py @@ -24,7 +24,6 @@ from sentry.api.utils import handle_query_errors, update_snuba_params_with_timestamp from sentry.eventstore.models import Event, GroupEvent from sentry.issues.issue_occurrence import IssueOccurrence -from sentry.middleware import is_frontend_request from sentry.models.group import Group from sentry.models.organization import Organization from sentry.models.project import Project @@ -1134,9 +1133,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht event_id, detailed, use_spans, - query_source=( - QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API - ), + query_source=self.get_request_source(request), ) ) @@ -1608,8 +1605,8 @@ def serialize_with_spans( warning_extra: dict[str, str], event_id: str | None, detailed: bool = False, + query_source: QuerySource | None = None, ) -> SerializedTrace: - query_source: QuerySource | None = (None,) root_traces: list[TraceEvent] = [] orphans: list[TraceEvent] = [] orphan_event_ids: set[str] = set() From a6dbf85998cf2c513ba5c861f572c9254cc2b477 Mon Sep 17 00:00:00 2001 From: Athena Moghaddam Date: Mon, 12 Aug 2024 21:16:32 -0700 Subject: [PATCH 5/7] fix typing --- src/sentry/api/base.py | 2 +- src/sentry/api/endpoints/organization_events_trace.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index 6f8c38c1b53587..962b733c078b76 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -586,7 +586,7 @@ def paginate( self.add_cursor_headers(request, response, cursor_result) return response - def get_request_source(request: Request) -> QuerySource: + def get_request_source(self, request: Request) -> QuerySource: """ This is an estimate of query source. Treat it more like a good guess and don't write logic that depends on it. Used for monitoring only atm. 
diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py index 89662dd97805d3..150254b4ee36bc 100644 --- a/src/sentry/api/endpoints/organization_events_trace.py +++ b/src/sentry/api/endpoints/organization_events_trace.py @@ -1148,6 +1148,7 @@ def serialize( event_id: str | None, detailed: bool = False, use_spans: bool = False, + query_source: QuerySource | None = None, ) -> Any: raise NotImplementedError From d4755403ad5f6c4428fc6dcbab2176f131cfa533 Mon Sep 17 00:00:00 2001 From: Athena Moghaddam Date: Thu, 15 Aug 2024 08:50:39 -0700 Subject: [PATCH 6/7] fix test --- src/sentry/snuba/issue_platform.py | 3 ++- src/sentry/snuba/metrics_enhanced_performance.py | 3 +++ src/sentry/snuba/metrics_summaries.py | 4 +++- src/sentry/snuba/profile_functions_metrics.py | 4 +++- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/sentry/snuba/issue_platform.py b/src/sentry/snuba/issue_platform.py index 3c200c822eaa5c..ee2bbb40b478e6 100644 --- a/src/sentry/snuba/issue_platform.py +++ b/src/sentry/snuba/issue_platform.py @@ -42,6 +42,7 @@ def query( on_demand_metrics_enabled=False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions=False, + query_source: QuerySource | None = None, ) -> EventsResponse: """ High-level API for doing arbitrary user queries against events. @@ -101,7 +102,7 @@ def query( ) if conditions is not None: builder.add_conditions(conditions) - result = builder.process_results(builder.run_query(referrer)) + result = builder.process_results(builder.run_query(referrer, query_source=query_source)) result["meta"]["tips"] = transform_tips(builder.tips) return result diff --git a/src/sentry/snuba/metrics_enhanced_performance.py b/src/sentry/snuba/metrics_enhanced_performance.py index 83ccc027c6df62..52caa91565da95 100644 --- a/src/sentry/snuba/metrics_enhanced_performance.py +++ b/src/sentry/snuba/metrics_enhanced_performance.py @@ -48,6 +48,7 @@ def query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions: bool = False, + query_source: QuerySource | None = None, ): metrics_compatible = not equations dataset_reason = discover.DEFAULT_DATASET_REASON @@ -75,6 +76,7 @@ def query( use_metrics_layer, on_demand_metrics_enabled, on_demand_metrics_type=on_demand_metrics_type, + query_source=query_source, ) result["meta"]["datasetReason"] = dataset_reason @@ -113,6 +115,7 @@ def query( functions_acl=functions_acl, transform_alias_to_input_format=transform_alias_to_input_format, has_metrics=has_metrics, + query_source=query_source, ) results["meta"]["isMetricsData"] = False results["meta"]["isMetricsExtractedData"] = False diff --git a/src/sentry/snuba/metrics_summaries.py b/src/sentry/snuba/metrics_summaries.py index 8277714ff81dec..3abff797de7237 100644 --- a/src/sentry/snuba/metrics_summaries.py +++ b/src/sentry/snuba/metrics_summaries.py @@ -2,6 +2,7 @@ from sentry.search.events.types import QueryBuilderConfig from sentry.snuba.dataset import Dataset from sentry.snuba.metrics.extraction import MetricSpecType +from sentry.snuba.query_sources import QuerySource def query( @@ -30,6 +31,7 @@ def query( on_demand_metrics_enabled=False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions=False, + query_source: QuerySource | None = None, ): builder = MetricsSummariesQueryBuilder( Dataset.MetricsSummaries, @@ -54,5 +56,5 @@ def query( ), ) - result = builder.process_results(builder.run_query(referrer)) 
+ result = builder.process_results(builder.run_query(referrer, query_source=query_source)) return result diff --git a/src/sentry/snuba/profile_functions_metrics.py b/src/sentry/snuba/profile_functions_metrics.py index 6312941d7e078d..43d9dc1e84898d 100644 --- a/src/sentry/snuba/profile_functions_metrics.py +++ b/src/sentry/snuba/profile_functions_metrics.py @@ -13,6 +13,7 @@ from sentry.snuba import discover from sentry.snuba.dataset import Dataset from sentry.snuba.metrics.extraction import MetricSpecType +from sentry.snuba.query_sources import QuerySource from sentry.utils.snuba import SnubaTSResult logger = logging.getLogger(__name__) @@ -44,6 +45,7 @@ def query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions: bool = False, + query_source: QuerySource | None = None, ): builder = ProfileFunctionsMetricsQueryBuilder( dataset=Dataset.PerformanceMetrics, @@ -69,7 +71,7 @@ def query( ), ) - result = builder.process_results(builder.run_query(referrer)) + result = builder.process_results(builder.run_query(referrer, query_source=query_source)) return result From 8ad6996ca52f809d8013be768f6a6e92df5fc7c3 Mon Sep 17 00:00:00 2001 From: Athena Moghaddam Date: Thu, 15 Aug 2024 11:57:46 -0700 Subject: [PATCH 7/7] more tests --- src/sentry/search/events/builder/metrics.py | 6 ++++-- src/sentry/snuba/discover.py | 6 ++++-- src/sentry/snuba/metrics_enhanced_performance.py | 3 +++ src/sentry/snuba/metrics_performance.py | 11 +++++++---- src/sentry/snuba/profile_functions_metrics.py | 3 ++- src/sentry/snuba/profiles.py | 3 ++- 6 files changed, 22 insertions(+), 10 deletions(-) diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py index ef314e38458e5a..e208472a11cf11 100644 --- a/src/sentry/search/events/builder/metrics.py +++ b/src/sentry/search/events/builder/metrics.py @@ -1591,8 +1591,10 @@ def __init__( kwargs["config"] = config super().__init__(*args, **kwargs) - def run_query(self, referrer: str, use_cache: bool = False) -> Any: - result = super().run_query(referrer, use_cache) + def run_query( + self, referrer: str, use_cache: bool = False, query_source: QuerySource | None = None + ) -> Any: + result = super().run_query(referrer, use_cache, query_source=query_source) for row in result["data"]: for key, value in row.items(): if key in self.histogram_aliases: diff --git a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py index d486cc0716d1a9..d3e19407dd61ee 100644 --- a/src/sentry/snuba/discover.py +++ b/src/sentry/snuba/discover.py @@ -819,6 +819,7 @@ def spans_histogram_query( use_metrics_layer: bool = False, on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, + query_source: QuerySource | None = None, ) -> EventsResponse | SnubaData: """ API for generating histograms for span exclusive time. @@ -883,7 +884,7 @@ def spans_histogram_query( Condition(Function("has", [builder.column("spans_group"), span.group]), Op.EQ, 1), ] ) - results = builder.run_query(referrer) + results = builder.run_query(referrer, query_source=query_source) if not normalize_results: return results @@ -911,6 +912,7 @@ def histogram_query( use_metrics_layer: bool = False, on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, + query_source: QuerySource | None = None, ): """ API for generating histograms for numeric columns. 
@@ -1002,7 +1004,7 @@ def histogram_query( ) if extra_conditions is not None: builder.add_conditions(extra_conditions) - results = builder.process_results(builder.run_query(referrer)) + results = builder.process_results(builder.run_query(referrer, query_source=query_source)) if not normalize_results: return results diff --git a/src/sentry/snuba/metrics_enhanced_performance.py b/src/sentry/snuba/metrics_enhanced_performance.py index 52caa91565da95..884089538effeb 100644 --- a/src/sentry/snuba/metrics_enhanced_performance.py +++ b/src/sentry/snuba/metrics_enhanced_performance.py @@ -325,6 +325,7 @@ def histogram_query( use_metrics_layer=False, on_demand_metrics_enabled=False, on_demand_metrics_type=None, + query_source: QuerySource | None = None, ): """ High-level API for doing arbitrary user timeseries queries against events. @@ -352,6 +353,7 @@ def histogram_query( extra_conditions, normalize_results, use_metrics_layer, + query_source=query_source, ) # raise Invalid Queries since the same thing will happen with discover except InvalidSearchQuery: @@ -381,5 +383,6 @@ def histogram_query( histogram_rows, extra_conditions, normalize_results, + query_source=query_source, ) return {} diff --git a/src/sentry/snuba/metrics_performance.py b/src/sentry/snuba/metrics_performance.py index 330c8ddef8c61a..824d5897f515a2 100644 --- a/src/sentry/snuba/metrics_performance.py +++ b/src/sentry/snuba/metrics_performance.py @@ -110,7 +110,8 @@ def bulk_timeseries_query( *, apply_formatting: Literal[False], query_source: QuerySource | None = None, -) -> EventsResponse: ... +) -> EventsResponse: + ... @overload @@ -131,7 +132,8 @@ def bulk_timeseries_query( on_demand_metrics_type: MetricSpecType | None = None, groupby: Column | None = None, query_source: QuerySource | None = None, -) -> SnubaTSResult: ... +) -> SnubaTSResult: + ... def bulk_timeseries_query( @@ -150,9 +152,9 @@ def bulk_timeseries_query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, groupby: Column | None = None, + query_source: QuerySource | None = None, *, apply_formatting: bool = True, - query_source: QuerySource | None = None, ) -> SnubaTSResult | EventsResponse: """ High-level API for doing *bulk* arbitrary user timeseries queries against events. @@ -553,6 +555,7 @@ def histogram_query( extra_conditions=None, normalize_results=True, use_metrics_layer=True, + query_source: QuerySource | None = None, ): """ API for generating histograms for numeric columns. @@ -619,7 +622,7 @@ def histogram_query( ) if extra_conditions is not None: builder.add_conditions(extra_conditions) - results = builder.run_query(referrer) + results = builder.run_query(referrer, query_source=query_source) # TODO: format to match non-metric-result if not normalize_results: diff --git a/src/sentry/snuba/profile_functions_metrics.py b/src/sentry/snuba/profile_functions_metrics.py index 43d9dc1e84898d..d0865141342144 100644 --- a/src/sentry/snuba/profile_functions_metrics.py +++ b/src/sentry/snuba/profile_functions_metrics.py @@ -91,6 +91,7 @@ def timeseries_query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, groupby: Column | None = None, + query_source: QuerySource | None = None, ) -> SnubaTSResult: """ High-level API for doing arbitrary user timeseries queries against events. 
@@ -114,7 +115,7 @@ def timeseries_query( use_metrics_layer=use_metrics_layer, ), ) - result = metrics_query.run_query(referrer) + result = metrics_query.run_query(referrer, query_source=query_source) result = metrics_query.process_results(result) result["data"] = ( diff --git a/src/sentry/snuba/profiles.py b/src/sentry/snuba/profiles.py index 4688e69ca13a27..fad4d3772af68c 100644 --- a/src/sentry/snuba/profiles.py +++ b/src/sentry/snuba/profiles.py @@ -36,6 +36,7 @@ def query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions=False, + query_source: QuerySource | None = None, ) -> Any: if not selected_columns: raise InvalidSearchQuery("No columns selected") @@ -57,7 +58,7 @@ def query( functions_acl=functions_acl, ), ) - result = builder.process_results(builder.run_query(referrer)) + result = builder.process_results(builder.run_query(referrer, query_source=query_source)) result["meta"]["tips"] = transform_tips(builder.tips) return result
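
Editor's note on the resulting call pattern: taken together, the series threads an optional query_source from the HTTP layer down to the Snuba query builders — endpoints resolve it with the new Endpoint.get_request_source() helper and forward it through the dataset-level query()/timeseries_query() functions into run_query()/bulk_snuba_queries(). The sketch below illustrates that flow only; it is not part of the patch series. The endpoint subclass, column list, import paths, and the get_snuba_params helper are illustrative assumptions, while get_request_source, QuerySource, and the query_source= keyword mirror the diffs above.

# Illustrative sketch only -- not part of the patch series.
from sentry.api.bases import OrganizationEventsV2EndpointBase
from sentry.search.events.builder.discover import DiscoverQueryBuilder  # path assumed
from sentry.snuba.dataset import Dataset
from sentry.snuba.query_sources import QuerySource
from sentry.snuba.referrer import Referrer


class ExampleEventsEndpoint(OrganizationEventsV2EndpointBase):  # hypothetical endpoint
    snuba_methods = ["GET"]

    def get(self, request, organization):
        # New in this series: FRONTEND for requests coming from the Sentry UI,
        # API for everything else.  Code paths with no request in hand (e.g. the
        # trace helpers' defaults) pass QuerySource.SENTRY_BACKEND explicitly.
        query_source: QuerySource = self.get_request_source(request)
        snuba_params = self.get_snuba_params(request, organization)  # assumed helper

        builder = DiscoverQueryBuilder(
            Dataset.Discover,
            params={},
            snuba_params=snuba_params,
            query=request.GET.get("query", ""),
            selected_columns=["transaction", "count()"],
        )
        # query_source is forwarded to Snuba for monitoring/attribution only;
        # per the get_request_source docstring it is a best guess, not a contract.
        result = builder.run_query(
            referrer=Referrer.API_ORGANIZATION_EVENTS.value,
            query_source=query_source,
        )
        return self.respond(builder.process_results(result)["data"])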