feat(apis): Pass query source to snuba - phase 3
sentaur-athena committed Aug 6, 2024
1 parent 2527917 commit a3ab8a9
Showing 5 changed files with 77 additions and 40 deletions.
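
The changes in this commit all follow one pattern: each endpoint derives a QuerySource from the incoming request and forwards it to every Snuba query it issues, presumably so Snuba-side load can be attributed to frontend traffic, external API traffic, or background work. Below is a minimal, self-contained sketch of that pattern. The QuerySource members and the is_frontend_request check mirror what the diff uses; FakeRequest, run_dataset_query, and data_fn are stand-ins invented for illustration and are not part of this commit.

from dataclasses import dataclass
from enum import Enum
from typing import Any


class QuerySource(Enum):
    # Stand-in for sentry.snuba.query_sources.QuerySource; only the members
    # referenced in this diff are shown, and the real values may differ.
    FRONTEND = "frontend"
    API = "api"
    SENTRY_BACKEND = "sentry_backend"


@dataclass
class FakeRequest:
    # Stand-in for the DRF request that sentry.middleware.is_frontend_request inspects.
    from_frontend: bool


def is_frontend_request(request: FakeRequest) -> bool:
    return request.from_frontend


def run_dataset_query(query: str, query_source: QuerySource) -> dict[str, Any]:
    # Stand-in for scoped_dataset.query(...); the real call forwards
    # query_source to Snuba alongside the referrer.
    return {"query": query, "source": query_source.value}


def data_fn(request: FakeRequest, query: str) -> dict[str, Any]:
    # The expression used throughout this commit: UI traffic is tagged
    # FRONTEND, everything else hitting the endpoint is tagged API.
    query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API
    return run_dataset_query(query, query_source=query_source)


print(data_fn(FakeRequest(from_frontend=True), "event.type:error"))   # source: frontend
print(data_fn(FakeRequest(from_frontend=False), "event.type:error"))  # source: api
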
32 changes: 18 additions & 14 deletions src/sentry/api/endpoints/organization_events.py
@@ -21,6 +21,7 @@
from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.discover.models import DiscoverSavedQuery, DiscoverSavedQueryTypes
from sentry.exceptions import InvalidParams
from sentry.middleware import is_frontend_request
from sentry.models.dashboard_widget import DashboardWidget, DashboardWidgetTypes
from sentry.models.organization import Organization
from sentry.snuba import (
@@ -31,6 +32,7 @@
transactions,
)
from sentry.snuba.metrics.extraction import MetricSpecType
from sentry.snuba.query_sources import QuerySource
from sentry.snuba.referrer import Referrer
from sentry.snuba.utils import dataset_split_decision_inferred_from_query, get_dataset
from sentry.types.ratelimit import RateLimit, RateLimitCategory
@@ -375,6 +377,7 @@ def get(self, request: Request, organization) -> Response:
referrer = Referrer.API_ORGANIZATION_EVENTS.value

def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API
return scoped_dataset.query(
selected_columns=self.get_field_list(organization, request),
query=query,
@@ -395,6 +398,7 @@ def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
use_metrics_layer=batch_features.get("organizations:use-metrics-layer", False),
on_demand_metrics_enabled=on_demand_metrics_enabled,
on_demand_metrics_type=on_demand_metrics_type,
query_source=query_source,
)

@sentry_sdk.tracing.trace
@@ -453,14 +457,14 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w
if decision == DashboardWidgetTypes.DISCOVER:
return _data_fn(discover, offset, limit, scoped_query)
elif decision == DashboardWidgetTypes.TRANSACTION_LIKE:
original_results["meta"][
"discoverSplitDecision"
] = DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.TRANSACTION_LIKE)
original_results["meta"]["discoverSplitDecision"] = (
DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.TRANSACTION_LIKE)
)
return original_results
elif decision == DashboardWidgetTypes.ERROR_EVENTS and error_results:
error_results["meta"][
"discoverSplitDecision"
] = DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS)
error_results["meta"]["discoverSplitDecision"] = (
DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS)
)
return error_results
else:
return original_results
@@ -533,21 +537,21 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save

try:
error_results = map["errors"]
error_results["meta"][
"discoverSplitDecision"
] = DiscoverSavedQueryTypes.get_type_name(
DiscoverSavedQueryTypes.ERROR_EVENTS
error_results["meta"]["discoverSplitDecision"] = (
DiscoverSavedQueryTypes.get_type_name(
DiscoverSavedQueryTypes.ERROR_EVENTS
)
)
has_errors = len(error_results["data"]) > 0
except KeyError:
error_results = None

try:
transaction_results = map["transactions"]
transaction_results["meta"][
"discoverSplitDecision"
] = DiscoverSavedQueryTypes.get_type_name(
DiscoverSavedQueryTypes.TRANSACTION_LIKE
transaction_results["meta"]["discoverSplitDecision"] = (
DiscoverSavedQueryTypes.get_type_name(
DiscoverSavedQueryTypes.TRANSACTION_LIKE
)
)
has_transactions = len(transaction_results["data"]) > 0
except KeyError:
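
Aside from the query_source plumbing, the two larger hunks in this file (-453 and -533) appear to be formatting-only: the multi-line discoverSplitDecision assignments are re-wrapped with the value in parentheses, presumably by a formatter update. The two layouts are equivalent; a trivial check, with a placeholder string standing in for the get_type_name(...) calls:

meta: dict[str, str] = {}

# Old wrapping, as removed above; the plain string stands in for
# DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.TRANSACTION_LIKE)
meta[
    "discoverSplitDecision"
] = "transaction-like"
old_value = meta["discoverSplitDecision"]

# New wrapping, as added above: same assignment, only the line breaks differ
meta["discoverSplitDecision"] = (
    "transaction-like"
)

assert meta["discoverSplitDecision"] == old_value == "transaction-like"
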
61 changes: 46 additions & 15 deletions src/sentry/api/endpoints/organization_events_trace.py
@@ -24,6 +24,7 @@
from sentry.api.utils import handle_query_errors, update_snuba_params_with_timestamp
from sentry.eventstore.models import Event
from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.middleware import is_frontend_request
from sentry.models.group import Group
from sentry.models.organization import Organization
from sentry.models.project import Project
@@ -33,6 +34,7 @@
from sentry.search.events.types import QueryBuilderConfig, SnubaParams
from sentry.snuba import discover
from sentry.snuba.dataset import Dataset
from sentry.snuba.query_sources import QuerySource
from sentry.snuba.referrer import Referrer
from sentry.utils.iterators import chunked
from sentry.utils.numbers import base32_encode, format_grouped_length
@@ -193,11 +195,13 @@ def __init__(
light: bool = False,
snuba_params: SnubaParams | None = None,
span_serialized: bool = False,
query_source: QuerySource | None = QuerySource.SENTRY_BACKEND,
) -> None:
self.event: SnubaTransaction = event
self.errors: list[TraceError] = []
self.children: list[TraceEvent] = []
self.performance_issues: list[TracePerformanceIssue] = []
self.query_source = query_source

# Can be None on the light trace when we don't know the parent
self.parent_event_id: str | None = parent
@@ -307,7 +311,8 @@ def load_performance_issues(self, light: bool, snuba_params: SnubaParams | None)
)
occurrence_ids = occurrence_query.process_results(
occurrence_query.run_query(
referrer=Referrer.API_TRACE_VIEW_GET_OCCURRENCE_IDS.value
referrer=Referrer.API_TRACE_VIEW_GET_OCCURRENCE_IDS.value,
query_source=self.query_source,
)
)["data"]

@@ -500,7 +505,7 @@ def child_sort_key(item: TraceEvent) -> list[int | str]:
]


def count_performance_issues(trace_id: str, params: SnubaParams) -> int:
def count_performance_issues(trace_id: str, params: SnubaParams, query_source: QuerySource | None = QuerySource.SENTRY_BACKEND) -> int:
transaction_query = DiscoverQueryBuilder(
Dataset.IssuePlatform,
params={},
@@ -511,7 +516,8 @@ def count_performance_issues(trace_id: str, params: SnubaParams) -> int:
)
transaction_query.columns.append(Function("count()", alias="total_groups"))
count = transaction_query.run_query(
referrer=Referrer.API_TRACE_VIEW_COUNT_PERFORMANCE_ISSUES.value
referrer=Referrer.API_TRACE_VIEW_COUNT_PERFORMANCE_ISSUES.value,
query_source=query_source,
)
return count["data"][0].get("total_groups", 0)

@@ -520,6 +526,7 @@ def count_performance_issues(trace_id: str, params: SnubaParams) -> int:
def create_transaction_params(
trace_id: str,
snuba_params: SnubaParams,
query_source: QuerySource | None = QuerySource.SENTRY_BACKEND,
) -> SnubaParams:
"""Can't use the transaction params for errors since traces can be errors only"""
query_metadata = options.get("performance.traces.query_timestamp_projects")
@@ -538,7 +545,9 @@ def create_transaction_params(
"project.id",
],
)
results = metadata_query.run_query(Referrer.API_TRACE_VIEW_GET_TIMESTAMP_PROJECTS.value)
results = metadata_query.run_query(
Referrer.API_TRACE_VIEW_GET_TIMESTAMP_PROJECTS.value, query_source=query_source
)
results = metadata_query.process_results(results)
project_id_set = set()
min_timestamp = None
@@ -583,6 +592,7 @@ def query_trace_data(
limit: int,
event_id: str | None,
use_spans: bool,
query_source: QuerySource | None = QuerySource.SENTRY_BACKEND,
) -> tuple[Sequence[SnubaTransaction], Sequence[SnubaError]]:
transaction_columns = [
"id",
@@ -672,6 +682,7 @@
occurrence_query.get_snql_query(),
],
referrer=Referrer.API_TRACE_VIEW_GET_EVENTS.value,
query_source=query_source,
)

transformed_results = [
@@ -774,6 +785,7 @@ def augment_transactions_with_spans(
errors: Sequence[SnubaError],
trace_id: str,
params: SnubaParams,
query_source: QuerySource | None = QuerySource.SENTRY_BACKEND,
) -> Sequence[SnubaTransaction]:
"""Augment the list of transactions with parent, error and problem data"""
with sentry_sdk.start_span(op="augment.transactions", description="setup"):
@@ -882,6 +894,7 @@ def augment_transactions_with_spans(
results = bulk_snuba_queries(
[query.get_snql_query() for query in queries],
referrer=Referrer.API_TRACE_VIEW_GET_PARENTS.value,
query_source=query_source,
)
parents_results = results[0]
for result, query in zip(results, queries):
@@ -892,7 +905,8 @@
else:
parents_query = build_span_query(trace_id, spans_params, list(query_spans))
parents_results = parents_query.run_query(
referrer=Referrer.API_TRACE_VIEW_GET_PARENTS.value
referrer=Referrer.API_TRACE_VIEW_GET_PARENTS.value,
query_source=query_source,
)
if len(parents_results) == parents_query.limit.limit:
hit_limit = True
@@ -1054,19 +1068,20 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht
if event_id and not is_event_id(event_id):
return Response({"detail": INVALID_ID_DETAILS.format("Event ID")}, status=400)

query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API
with handle_query_errors():
transaction_params = create_transaction_params(trace_id, snuba_params)
transaction_params = create_transaction_params(trace_id, snuba_params, query_source=query_source)

if use_spans:
transactions, errors = query_trace_data(
trace_id, snuba_params, transaction_params, limit, event_id, use_spans
trace_id, snuba_params, transaction_params, limit, event_id, use_spans, query_source=query_source
)
transactions = augment_transactions_with_spans(
transactions, errors, trace_id, snuba_params
transactions, errors, trace_id, snuba_params, query_source=query_source,
)
else:
transactions, errors = query_trace_data(
trace_id, snuba_params, transaction_params, limit, None, False
trace_id, snuba_params, transaction_params, limit, None, False, query_source=query_source
)
self.record_analytics(transactions, trace_id, self.request.user.id, organization.id)

@@ -1098,6 +1113,9 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht
event_id,
detailed,
use_spans,
query_source=(
QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API
),
)
)

@@ -1182,6 +1200,7 @@ def serialize(
event_id: str | None,
detailed: bool = False,
use_spans: bool = False,
query_source: QuerySource | None = None,
) -> dict[str, list[LightResponse | TraceError]]:
"""Because the light endpoint could potentially have gaps between root and event we return a flattened list"""
if use_spans:
Expand Down Expand Up @@ -1245,13 +1264,14 @@ def serialize(
0,
True,
snuba_params=snuba_params,
query_source=query_source,
)
)
current_generation = 1
break

current_event = TraceEvent(
snuba_event, root_id, current_generation, True, snuba_params=snuba_params
snuba_event, root_id, current_generation, True, snuba_params=snuba_params, query_source=query_source,
)
trace_results.append(current_event)

@@ -1284,6 +1304,7 @@ def serialize(
),
True,
snuba_params=snuba_params,
query_source=query_source,
)
for child_event in child_events
]
@@ -1345,6 +1366,7 @@ def serialize(
event_id: str | None,
detailed: bool = False,
use_spans: bool = False,
query_source: QuerySource | None = None,
) -> SerializedTrace:
"""For the full event trace, we return the results as a graph instead of a flattened list
@@ -1360,6 +1382,7 @@
warning_extra,
event_id,
detailed,
query_source=query_source,
)
return results

@@ -1379,7 +1402,7 @@ def serialize(
if roots:
results_map[None] = []
for root in roots:
root_event = TraceEvent(root, None, 0, snuba_params=snuba_params)
root_event = TraceEvent(root, None, 0, snuba_params=snuba_params, query_source=query_source)
parent_events[root["id"]] = root_event
results_map[None].append(root_event)
to_check.append(root)
@@ -1400,7 +1423,7 @@ def serialize(
parent_map[parent_span_id] = siblings

previous_event = parent_events[current_event["id"]] = TraceEvent(
current_event, None, 0, snuba_params=snuba_params
current_event, None, 0, snuba_params=snuba_params, query_source=query_source,
)

# Used to avoid removing the orphan from results entirely if we loop
@@ -1475,6 +1498,7 @@ def serialize(
else None
),
snuba_params=snuba_params,
query_source=query_source,
)
# Add this event to its parent's children
previous_event.children.append(parent_events[child_event["id"]])
@@ -1547,6 +1571,7 @@ def serialize_with_spans(
event_id: str | None,
detailed: bool = False,
query_source: QuerySource | None = None,
) -> SerializedTrace:
root_traces: list[TraceEvent] = []
orphans: list[TraceEvent] = []
orphan_event_ids: set[str] = set()
@@ -1560,7 +1585,11 @@
for transaction in transactions:
parent_id = transaction["trace.parent_transaction"]
serialized_transaction = TraceEvent(
transaction, parent_id, -1, span_serialized=True
transaction,
parent_id,
-1,
span_serialized=True,
query_source=query_source,
)
if parent_id is None:
if transaction["trace.parent_span"]:
@@ -1637,6 +1666,7 @@ class OrganizationEventsTraceMetaEndpoint(OrganizationEventsV2EndpointBase):
publish_status = {
"GET": ApiPublishStatus.PRIVATE,
}
snuba_methods = ["GET"]

def get(self, request: Request, organization: Organization, trace_id: str) -> HttpResponse:
if not self.has_feature(organization, request):
@@ -1651,7 +1681,7 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht
return Response(status=404)

update_snuba_params_with_timestamp(request, snuba_params)

query_source = QuerySource.FRONTEND if is_frontend_request(request) else QuerySource.API
with handle_query_errors():
result = discover.query(
selected_columns=[
@@ -1664,12 +1694,13 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Ht
query=f"trace:{trace_id}",
limit=1,
referrer=Referrer.API_TRACE_VIEW_GET_META.value,
query_source=query_source,
)
if len(result["data"]) == 0:
return Response(status=404)
# Merge the result back into the first query
result["data"][0]["performance_issues"] = count_performance_issues(
trace_id, snuba_params
trace_id, snuba_params, query_source=query_source
)
return Response(self.serialize(result["data"][0]))

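
One detail worth noting in organization_events_trace.py: the shared helpers (count_performance_issues, create_transaction_params, query_trace_data, augment_transactions_with_spans, and TraceEvent) default query_source to QuerySource.SENTRY_BACKEND, while the endpoints pass FRONTEND or API explicitly. That way callers outside a request cycle keep being attributed to backend work without any changes. A hedged sketch of that default-override shape; count_issues is illustrative, not the real helper, and the QuerySource stand-in from the first sketch is re-declared here so the snippet runs on its own.

from enum import Enum


class QuerySource(Enum):  # same stand-in as in the first sketch
    FRONTEND = "frontend"
    API = "api"
    SENTRY_BACKEND = "sentry_backend"


def count_issues(trace_id: str, query_source: QuerySource | None = QuerySource.SENTRY_BACKEND) -> int:
    # Callers that omit query_source (background jobs, internal tooling) are
    # attributed to SENTRY_BACKEND; request handlers override it per request.
    print(f"querying trace {trace_id} as {query_source.value if query_source else 'unknown'}")
    return 0


count_issues("a" * 32)                                # -> sentry_backend
count_issues("a" * 32, query_source=QuerySource.API)  # -> api
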
