From f24f9d53e0bd1cd94a8850c9086a848fa618bef8 Mon Sep 17 00:00:00 2001 From: Athena Moghaddam <132939361+sentaur-athena@users.noreply.github.com> Date: Thu, 15 Aug 2024 13:26:32 -0700 Subject: [PATCH] feat(apis): Pass query source to snuba - phase 3 (#73499) Continuing https://github.com/getsentry/sentry/pull/73497 to pass query source to snuba for more debuggability. OrganizationEventsTraceBaseEndpoint OrganizationsEventsNewTrendsStatsEndpoint OrganizationsEventsEndpoint --- src/sentry/api/base.py | 19 +++- .../api/endpoints/organization_events.py | 2 + .../endpoints/organization_events_stats.py | 3 +- .../endpoints/organization_events_trace.py | 95 ++++++++++++++++--- .../organization_events_trends_v2.py | 3 + src/sentry/search/events/builder/metrics.py | 6 +- src/sentry/snuba/discover.py | 6 +- src/sentry/snuba/issue_platform.py | 3 +- .../snuba/metrics_enhanced_performance.py | 6 ++ src/sentry/snuba/metrics_performance.py | 10 +- src/sentry/snuba/metrics_summaries.py | 4 +- src/sentry/snuba/profile_functions_metrics.py | 3 +- src/sentry/snuba/profiles.py | 3 +- src/sentry/snuba/transactions.py | 2 + 14 files changed, 135 insertions(+), 30 deletions(-) diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index 3358b50d766704..962b733c078b76 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -31,10 +31,12 @@ from sentry.apidocs.hooks import HTTP_METHOD_NAME from sentry.auth import access from sentry.auth.staff import has_staff_option +from sentry.middleware import is_frontend_request from sentry.models.environment import Environment from sentry.organizations.absolute_url import generate_organization_url from sentry.ratelimits.config import DEFAULT_RATE_LIMIT_CONFIG, RateLimitConfig from sentry.silo.base import SiloLimit, SiloMode +from sentry.snuba.query_sources import QuerySource from sentry.types.ratelimit import RateLimit, RateLimitCategory from sentry.utils.audit import create_audit_entry from sentry.utils.cursors import Cursor @@ -221,9 +223,11 @@ class Endpoint(APIView): owner: ApiOwner = ApiOwner.UNOWNED publish_status: dict[HTTP_METHOD_NAME, ApiPublishStatus] = {} - rate_limits: RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]] | Callable[ - ..., RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]] - ] = DEFAULT_RATE_LIMIT_CONFIG + rate_limits: ( + RateLimitConfig + | dict[str, dict[RateLimitCategory, RateLimit]] + | Callable[..., RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]]] + ) = DEFAULT_RATE_LIMIT_CONFIG enforce_rate_limit: bool = settings.SENTRY_RATELIMITER_ENABLED snuba_methods: list[HTTP_METHOD_NAME] = [] @@ -582,6 +586,15 @@ def paginate( self.add_cursor_headers(request, response, cursor_result) return response + def get_request_source(self, request: Request) -> QuerySource: + """ + This is an estimate of query source. Treat it more like a good guess and + don't write logic that depends on it. Used for monitoring only atm. 
+ """ + if is_frontend_request(request): + return QuerySource.FRONTEND + return QuerySource.API + class EnvironmentMixin: def _get_environment_func(self, request: Request, organization_id): diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 6e4b32ea3109bf..cf4fb43330d9f8 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -395,6 +395,7 @@ def get(self, request: Request, organization) -> Response: referrer = Referrer.API_ORGANIZATION_EVENTS.value def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]: + query_source = self.get_request_source(request) return scoped_dataset.query( selected_columns=self.get_field_list(organization, request), query=query, @@ -415,6 +416,7 @@ def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]: use_metrics_layer=batch_features.get("organizations:use-metrics-layer", False), on_demand_metrics_enabled=on_demand_metrics_enabled, on_demand_metrics_type=on_demand_metrics_type, + query_source=query_source, ) @sentry_sdk.tracing.trace diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 242e710de4fc92..cc761acba80ee7 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -12,7 +12,6 @@ from sentry.api.base import region_silo_endpoint from sentry.api.bases import OrganizationEventsV2EndpointBase from sentry.constants import MAX_TOP_EVENTS -from sentry.middleware import is_frontend_request from sentry.models.dashboard_widget import DashboardWidget, DashboardWidgetTypes from sentry.models.organization import Organization from sentry.search.events.types import SnubaParams @@ -182,7 +181,7 @@ def check_if_results_have_data(self, results: SnubaTSResult | dict[str, SnubaTSR return has_data def get(self, request: Request, organization: Organization) -> Response: - query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API + query_source = self.get_request_source(request) with sentry_sdk.start_span(op="discover.endpoint", description="filter_params") as span: span.set_data("organization", organization) diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py index 6561732a861d2c..5d509b985534c9 100644 --- a/src/sentry/api/endpoints/organization_events_trace.py +++ b/src/sentry/api/endpoints/organization_events_trace.py @@ -32,6 +32,7 @@ from sentry.search.events.builder.spans_indexed import SpansIndexedQueryBuilder from sentry.search.events.types import QueryBuilderConfig, SnubaParams from sentry.snuba.dataset import Dataset +from sentry.snuba.query_sources import QuerySource from sentry.snuba.referrer import Referrer from sentry.utils.iterators import chunked from sentry.utils.numbers import base32_encode, format_grouped_length @@ -192,11 +193,13 @@ def __init__( light: bool = False, snuba_params: SnubaParams | None = None, span_serialized: bool = False, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, ) -> None: self.event: SnubaTransaction = event self.errors: list[TraceError] = [] self.children: list[TraceEvent] = [] self.performance_issues: list[TracePerformanceIssue] = [] + self.query_source = query_source # Can be None on the light trace when we don't know the parent self.parent_event_id: str | None = parent @@ -306,7 +309,8 @@ def 
load_performance_issues(self, light: bool, snuba_params: SnubaParams | None) ) occurrence_ids = occurrence_query.process_results( occurrence_query.run_query( - referrer=Referrer.API_TRACE_VIEW_GET_OCCURRENCE_IDS.value + referrer=Referrer.API_TRACE_VIEW_GET_OCCURRENCE_IDS.value, + query_source=self.query_source, ) )["data"] @@ -499,7 +503,11 @@ def child_sort_key(item: TraceEvent) -> list[int | str]: ] -def count_performance_issues(trace_id: str, params: SnubaParams) -> int: +def count_performance_issues( + trace_id: str, + params: SnubaParams, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, +) -> int: transaction_query = DiscoverQueryBuilder( Dataset.IssuePlatform, params={}, @@ -510,7 +518,8 @@ def count_performance_issues(trace_id: str, params: SnubaParams) -> int: ) transaction_query.columns.append(Function("count()", alias="total_groups")) count = transaction_query.run_query( - referrer=Referrer.API_TRACE_VIEW_COUNT_PERFORMANCE_ISSUES.value + referrer=Referrer.API_TRACE_VIEW_COUNT_PERFORMANCE_ISSUES.value, + query_source=query_source, ) return count["data"][0].get("total_groups", 0) @@ -519,6 +528,7 @@ def count_performance_issues(trace_id: str, params: SnubaParams) -> int: def create_transaction_params( trace_id: str, snuba_params: SnubaParams, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, ) -> SnubaParams: """Can't use the transaction params for errors since traces can be errors only""" query_metadata = options.get("performance.traces.query_timestamp_projects") @@ -537,7 +547,9 @@ def create_transaction_params( "project.id", ], ) - results = metadata_query.run_query(Referrer.API_TRACE_VIEW_GET_TIMESTAMP_PROJECTS.value) + results = metadata_query.run_query( + Referrer.API_TRACE_VIEW_GET_TIMESTAMP_PROJECTS.value, query_source=query_source + ) results = metadata_query.process_results(results) project_id_set = set() min_timestamp = None @@ -582,6 +594,7 @@ def query_trace_data( limit: int, event_id: str | None, use_spans: bool, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, ) -> tuple[Sequence[SnubaTransaction], Sequence[SnubaError]]: transaction_columns = [ "id", @@ -671,6 +684,7 @@ def query_trace_data( occurrence_query.get_snql_query(), ], referrer=Referrer.API_TRACE_VIEW_GET_EVENTS.value, + query_source=query_source, ) transformed_results = [ @@ -773,6 +787,7 @@ def augment_transactions_with_spans( errors: Sequence[SnubaError], trace_id: str, params: SnubaParams, + query_source: QuerySource | None = QuerySource.SENTRY_BACKEND, ) -> Sequence[SnubaTransaction]: """Augment the list of transactions with parent, error and problem data""" with sentry_sdk.start_span(op="augment.transactions", description="setup"): @@ -881,6 +896,7 @@ def augment_transactions_with_spans( results = bulk_snuba_queries( [query.get_snql_query() for query in queries], referrer=Referrer.API_TRACE_VIEW_GET_PARENTS.value, + query_source=query_source, ) parents_results = results[0] for result, query in zip(results, queries): @@ -891,7 +907,8 @@ def augment_transactions_with_spans( else: parents_query = build_span_query(trace_id, spans_params, list(query_spans)) parents_results = parents_query.run_query( - referrer=Referrer.API_TRACE_VIEW_GET_PARENTS.value + referrer=Referrer.API_TRACE_VIEW_GET_PARENTS.value, + query_source=query_source, ) if len(parents_results) == parents_query.limit.limit: hit_limit = True @@ -1051,19 +1068,38 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht if event_id and not is_event_id(event_id): return 
Response({"detail": INVALID_ID_DETAILS.format("Event ID")}, status=400) + query_source = self.get_request_source(request) with handle_query_errors(): - transaction_params = create_transaction_params(trace_id, snuba_params) + transaction_params = create_transaction_params( + trace_id, snuba_params, query_source=query_source + ) if use_spans: transactions, errors = query_trace_data( - trace_id, snuba_params, transaction_params, limit, event_id, use_spans + trace_id, + snuba_params, + transaction_params, + limit, + event_id, + use_spans, + query_source=query_source, ) transactions = augment_transactions_with_spans( - transactions, errors, trace_id, snuba_params + transactions, + errors, + trace_id, + snuba_params, + query_source=query_source, ) else: transactions, errors = query_trace_data( - trace_id, snuba_params, transaction_params, limit, None, False + trace_id, + snuba_params, + transaction_params, + limit, + None, + False, + query_source=query_source, ) self.record_analytics(transactions, trace_id, self.request.user.id, organization.id) @@ -1095,6 +1131,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht event_id, detailed, use_spans, + query_source=self.get_request_source(request), ) ) @@ -1109,6 +1146,7 @@ def serialize( event_id: str | None, detailed: bool = False, use_spans: bool = False, + query_source: QuerySource | None = None, ) -> Any: raise NotImplementedError @@ -1181,6 +1219,7 @@ def serialize( event_id: str | None, detailed: bool = False, use_spans: bool = False, + query_source: QuerySource | None = None, ) -> dict[str, list[LightResponse | TraceError]]: """Because the light endpoint could potentially have gaps between root and event we return a flattened list""" if use_spans: @@ -1247,13 +1286,19 @@ def serialize( 0, True, snuba_params=snuba_params, + query_source=query_source, ) ) current_generation = 1 break current_event = TraceEvent( - snuba_event, root_id, current_generation, True, snuba_params=snuba_params + snuba_event, + root_id, + current_generation, + True, + snuba_params=snuba_params, + query_source=query_source, ) trace_results.append(current_event) @@ -1286,6 +1331,7 @@ def serialize( ), True, snuba_params=snuba_params, + query_source=query_source, ) for child_event in child_events ] @@ -1348,6 +1394,7 @@ def serialize( event_id: str | None, detailed: bool = False, use_spans: bool = False, + query_source: QuerySource | None = None, ) -> SerializedTrace: """For the full event trace, we return the results as a graph instead of a flattened list @@ -1363,6 +1410,7 @@ def serialize( warning_extra, event_id, detailed, + query_source=query_source, ) return results @@ -1382,7 +1430,9 @@ def serialize( if roots: results_map[None] = [] for root in roots: - root_event = TraceEvent(root, None, 0, snuba_params=snuba_params) + root_event = TraceEvent( + root, None, 0, snuba_params=snuba_params, query_source=query_source + ) parent_events[root["id"]] = root_event results_map[None].append(root_event) to_check.append(root) @@ -1403,7 +1453,11 @@ def serialize( parent_map[parent_span_id] = siblings previous_event = parent_events[current_event["id"]] = TraceEvent( - current_event, None, 0, snuba_params=snuba_params + current_event, + None, + 0, + snuba_params=snuba_params, + query_source=query_source, ) # Used to avoid removing the orphan from results entirely if we loop @@ -1478,6 +1532,7 @@ def serialize( else None ), snuba_params=snuba_params, + query_source=query_source, ) # Add this event to its parent's children 
previous_event.children.append(parent_events[child_event["id"]]) @@ -1549,6 +1604,7 @@ def serialize_with_spans( warning_extra: dict[str, str], event_id: str | None, detailed: bool = False, + query_source: QuerySource | None = None, ) -> SerializedTrace: root_traces: list[TraceEvent] = [] orphans: list[TraceEvent] = [] @@ -1563,7 +1619,11 @@ def serialize_with_spans( for transaction in transactions: parent_id = transaction["trace.parent_transaction"] serialized_transaction = TraceEvent( - transaction, parent_id, -1, span_serialized=True + transaction, + parent_id, + -1, + span_serialized=True, + query_source=query_source, ) if parent_id is None: if transaction["trace.parent_span"]: @@ -1640,6 +1700,7 @@ class OrganizationEventsTraceMetaEndpoint(OrganizationEventsV2EndpointBase): publish_status = { "GET": ApiPublishStatus.PRIVATE, } + snuba_methods = ["GET"] def get(self, request: Request, organization: Organization, trace_id: str) -> HttpResponse: if not self.has_feature(organization, request): @@ -1652,7 +1713,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht return Response(status=404) update_snuba_params_with_timestamp(request, snuba_params) - + query_source = self.get_request_source(request) meta_query = DiscoverQueryBuilder( dataset=Dataset.Discover, selected_columns=[ @@ -1677,6 +1738,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht query=f"trace:{trace_id}", limit=10_000, ) + with handle_query_errors(): results = bulk_snuba_queries( [ @@ -1684,13 +1746,16 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht transaction_children_query.get_snql_query(), ], referrer=Referrer.API_TRACE_VIEW_GET_META.value, + query_source=query_source, ) meta_result, children_result = results[0], results[1] if len(meta_result["data"]) == 0: return Response(status=404) # Merge the result back into the first query meta_result["data"][0]["performance_issues"] = count_performance_issues( - trace_id, snuba_params + trace_id, + snuba_params, + query_source=query_source, ) return Response(self.serialize(meta_result["data"][0], children_result["data"])) diff --git a/src/sentry/api/endpoints/organization_events_trends_v2.py b/src/sentry/api/endpoints/organization_events_trends_v2.py index e6d439581a290f..b1cb83b4947b5b 100644 --- a/src/sentry/api/endpoints/organization_events_trends_v2.py +++ b/src/sentry/api/endpoints/organization_events_trends_v2.py @@ -90,6 +90,7 @@ def get(self, request: Request, organization) -> Response: selected_columns = ["project_id", "transaction"] query = request.GET.get("query") + query_source = self.get_request_source(request) def get_top_events(user_query, snuba_params, event_limit, referrer): top_event_columns = selected_columns[:] @@ -109,6 +110,7 @@ def get_top_events(user_query, snuba_params, event_limit, referrer): auto_aggregations=True, use_aggregate_conditions=True, granularity=DAY_GRANULARITY_IN_SECONDS, + query_source=query_source, ) def generate_top_transaction_query(events): @@ -154,6 +156,7 @@ def get_timeseries(top_events, _, rollup, zerofill_results): referrer=Referrer.API_TRENDS_GET_EVENT_STATS_V2_TIMESERIES.value, groupby=[Column("project_id"), Column("transaction")], apply_formatting=False, + query_source=query_source, ) # Parse results diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py index 70b2433e2c9c43..4d0785d23a0023 100644 --- a/src/sentry/search/events/builder/metrics.py +++ 
b/src/sentry/search/events/builder/metrics.py @@ -1589,8 +1589,10 @@ def __init__( kwargs["config"] = config super().__init__(*args, **kwargs) - def run_query(self, referrer: str, use_cache: bool = False) -> Any: - result = super().run_query(referrer, use_cache) + def run_query( + self, referrer: str, use_cache: bool = False, query_source: QuerySource | None = None + ) -> Any: + result = super().run_query(referrer, use_cache, query_source=query_source) for row in result["data"]: for key, value in row.items(): if key in self.histogram_aliases: diff --git a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py index a2c07be7a3aadd..73b29a53ad1346 100644 --- a/src/sentry/snuba/discover.py +++ b/src/sentry/snuba/discover.py @@ -835,6 +835,7 @@ def spans_histogram_query( use_metrics_layer: bool = False, on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, + query_source: QuerySource | None = None, ) -> EventsResponse | SnubaData: """ API for generating histograms for span exclusive time. @@ -899,7 +900,7 @@ def spans_histogram_query( Condition(Function("has", [builder.column("spans_group"), span.group]), Op.EQ, 1), ] ) - results = builder.run_query(referrer) + results = builder.run_query(referrer, query_source=query_source) if not normalize_results: return results @@ -927,6 +928,7 @@ def histogram_query( use_metrics_layer: bool = False, on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, + query_source: QuerySource | None = None, ): """ API for generating histograms for numeric columns. @@ -1018,7 +1020,7 @@ def histogram_query( ) if extra_conditions is not None: builder.add_conditions(extra_conditions) - results = builder.process_results(builder.run_query(referrer)) + results = builder.process_results(builder.run_query(referrer, query_source=query_source)) if not normalize_results: return results diff --git a/src/sentry/snuba/issue_platform.py b/src/sentry/snuba/issue_platform.py index 3c200c822eaa5c..ee2bbb40b478e6 100644 --- a/src/sentry/snuba/issue_platform.py +++ b/src/sentry/snuba/issue_platform.py @@ -42,6 +42,7 @@ def query( on_demand_metrics_enabled=False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions=False, + query_source: QuerySource | None = None, ) -> EventsResponse: """ High-level API for doing arbitrary user queries against events. 
@@ -101,7 +102,7 @@ def query( ) if conditions is not None: builder.add_conditions(conditions) - result = builder.process_results(builder.run_query(referrer)) + result = builder.process_results(builder.run_query(referrer, query_source=query_source)) result["meta"]["tips"] = transform_tips(builder.tips) return result diff --git a/src/sentry/snuba/metrics_enhanced_performance.py b/src/sentry/snuba/metrics_enhanced_performance.py index 83ccc027c6df62..884089538effeb 100644 --- a/src/sentry/snuba/metrics_enhanced_performance.py +++ b/src/sentry/snuba/metrics_enhanced_performance.py @@ -48,6 +48,7 @@ def query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions: bool = False, + query_source: QuerySource | None = None, ): metrics_compatible = not equations dataset_reason = discover.DEFAULT_DATASET_REASON @@ -75,6 +76,7 @@ def query( use_metrics_layer, on_demand_metrics_enabled, on_demand_metrics_type=on_demand_metrics_type, + query_source=query_source, ) result["meta"]["datasetReason"] = dataset_reason @@ -113,6 +115,7 @@ def query( functions_acl=functions_acl, transform_alias_to_input_format=transform_alias_to_input_format, has_metrics=has_metrics, + query_source=query_source, ) results["meta"]["isMetricsData"] = False results["meta"]["isMetricsExtractedData"] = False @@ -322,6 +325,7 @@ def histogram_query( use_metrics_layer=False, on_demand_metrics_enabled=False, on_demand_metrics_type=None, + query_source: QuerySource | None = None, ): """ High-level API for doing arbitrary user timeseries queries against events. @@ -349,6 +353,7 @@ def histogram_query( extra_conditions, normalize_results, use_metrics_layer, + query_source=query_source, ) # raise Invalid Queries since the same thing will happen with discover except InvalidSearchQuery: @@ -378,5 +383,6 @@ def histogram_query( histogram_rows, extra_conditions, normalize_results, + query_source=query_source, ) return {} diff --git a/src/sentry/snuba/metrics_performance.py b/src/sentry/snuba/metrics_performance.py index 7ea9308ab69bfc..824d5897f515a2 100644 --- a/src/sentry/snuba/metrics_performance.py +++ b/src/sentry/snuba/metrics_performance.py @@ -109,6 +109,7 @@ def bulk_timeseries_query( groupby: Column | None = None, *, apply_formatting: Literal[False], + query_source: QuerySource | None = None, ) -> EventsResponse: ... @@ -130,6 +131,7 @@ def bulk_timeseries_query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, groupby: Column | None = None, + query_source: QuerySource | None = None, ) -> SnubaTSResult: ... @@ -150,6 +152,7 @@ def bulk_timeseries_query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, groupby: Column | None = None, + query_source: QuerySource | None = None, *, apply_formatting: bool = True, ) -> SnubaTSResult | EventsResponse: @@ -186,7 +189,9 @@ def bulk_timeseries_query( metrics_queries.append(snql_query[0]) metrics_referrer = referrer + ".metrics-enhanced" - bulk_result = bulk_snuba_queries(metrics_queries, metrics_referrer) + bulk_result = bulk_snuba_queries( + metrics_queries, metrics_referrer, query_source=query_source + ) _result: dict[str, Any] = {"data": []} for br in bulk_result: _result["data"] = [*_result["data"], *br["data"]] @@ -550,6 +555,7 @@ def histogram_query( extra_conditions=None, normalize_results=True, use_metrics_layer=True, + query_source: QuerySource | None = None, ): """ API for generating histograms for numeric columns. 
@@ -616,7 +622,7 @@ def histogram_query( ) if extra_conditions is not None: builder.add_conditions(extra_conditions) - results = builder.run_query(referrer) + results = builder.run_query(referrer, query_source=query_source) # TODO: format to match non-metric-result if not normalize_results: diff --git a/src/sentry/snuba/metrics_summaries.py b/src/sentry/snuba/metrics_summaries.py index 8277714ff81dec..3abff797de7237 100644 --- a/src/sentry/snuba/metrics_summaries.py +++ b/src/sentry/snuba/metrics_summaries.py @@ -2,6 +2,7 @@ from sentry.search.events.types import QueryBuilderConfig from sentry.snuba.dataset import Dataset from sentry.snuba.metrics.extraction import MetricSpecType +from sentry.snuba.query_sources import QuerySource def query( @@ -30,6 +31,7 @@ def query( on_demand_metrics_enabled=False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions=False, + query_source: QuerySource | None = None, ): builder = MetricsSummariesQueryBuilder( Dataset.MetricsSummaries, @@ -54,5 +56,5 @@ def query( ), ) - result = builder.process_results(builder.run_query(referrer)) + result = builder.process_results(builder.run_query(referrer, query_source=query_source)) return result diff --git a/src/sentry/snuba/profile_functions_metrics.py b/src/sentry/snuba/profile_functions_metrics.py index c8a71f104991e3..d0865141342144 100644 --- a/src/sentry/snuba/profile_functions_metrics.py +++ b/src/sentry/snuba/profile_functions_metrics.py @@ -45,6 +45,7 @@ def query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions: bool = False, + query_source: QuerySource | None = None, ): builder = ProfileFunctionsMetricsQueryBuilder( dataset=Dataset.PerformanceMetrics, @@ -70,7 +71,7 @@ def query( ), ) - result = builder.process_results(builder.run_query(referrer)) + result = builder.process_results(builder.run_query(referrer, query_source=query_source)) return result diff --git a/src/sentry/snuba/profiles.py b/src/sentry/snuba/profiles.py index 4688e69ca13a27..fad4d3772af68c 100644 --- a/src/sentry/snuba/profiles.py +++ b/src/sentry/snuba/profiles.py @@ -36,6 +36,7 @@ def query( on_demand_metrics_enabled: bool = False, on_demand_metrics_type: MetricSpecType | None = None, fallback_to_transactions=False, + query_source: QuerySource | None = None, ) -> Any: if not selected_columns: raise InvalidSearchQuery("No columns selected") @@ -57,7 +58,7 @@ def query( functions_acl=functions_acl, ), ) - result = builder.process_results(builder.run_query(referrer)) + result = builder.process_results(builder.run_query(referrer, query_source=query_source)) result["meta"]["tips"] = transform_tips(builder.tips) return result diff --git a/src/sentry/snuba/transactions.py b/src/sentry/snuba/transactions.py index 7287c6133b6f8a..3b5a10f7083082 100644 --- a/src/sentry/snuba/transactions.py +++ b/src/sentry/snuba/transactions.py @@ -42,6 +42,7 @@ def query( on_demand_metrics_type: MetricSpecType | None = None, dataset: Dataset = Dataset.Discover, fallback_to_transactions: bool = False, + query_source: QuerySource | None = None, ) -> EventsResponse: return discover.query( selected_columns, @@ -70,6 +71,7 @@ def query( on_demand_metrics_type=on_demand_metrics_type, dataset=Dataset.Transactions, fallback_to_transactions=fallback_to_transactions, + query_source=query_source, )
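
For context when reviewing: the sketch below is not part of the patch and does not use the real Sentry APIs. It is a minimal, self-contained illustration of the pattern this series threads through the endpoints -- derive a query source from the incoming request once, then forward it unchanged through every builder/run_query call down to snuba. MiniQuerySource, is_frontend_request, run_query, and endpoint_get are illustrative stand-ins with assumed signatures.

    # Illustrative stand-ins only; names mirror the PR but are not the real Sentry APIs.
    from __future__ import annotations

    from enum import Enum


    class MiniQuerySource(Enum):
        FRONTEND = "frontend"
        API = "api"
        SENTRY_BACKEND = "sentry-backend"


    def is_frontend_request(request: dict) -> bool:
        # Stand-in for sentry.middleware.is_frontend_request: the real helper inspects
        # the incoming HTTP request to guess whether the caller is the Sentry UI.
        return bool(request.get("from_frontend", False))


    def get_request_source(request: dict) -> MiniQuerySource:
        # Mirrors Endpoint.get_request_source added in this patch: a best-effort guess,
        # used for monitoring only.
        if is_frontend_request(request):
            return MiniQuerySource.FRONTEND
        return MiniQuerySource.API


    def run_query(referrer: str, query_source: MiniQuerySource | None = None) -> dict:
        # Stand-in for a snuba query call; in the patch the builders forward
        # query_source to bulk_snuba_queries so it travels with the outgoing request.
        print(f"querying snuba: referrer={referrer}, source={query_source}")
        return {"data": []}


    def endpoint_get(request: dict) -> dict:
        # Endpoint pattern used throughout the diff: resolve the source once, then
        # pass it explicitly into every downstream query helper.
        query_source = get_request_source(request)
        return run_query("api.organization-events", query_source=query_source)


    if __name__ == "__main__":
        endpoint_get({"from_frontend": True})  # -> source=FRONTEND
        endpoint_get({})                       # -> source=API

Note that in the patch itself the trace helpers default to QuerySource.SENTRY_BACKEND rather than None, so background callers that never pass a source are still labeled, while API-facing call sites override the default with self.get_request_source(request).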