feat(eap): Use new schema for the MQL EAP bridge (#76714)
This standardizes things between graphing and samples so that a lot of the logic can be reused in snuba
colin-sentry committed Aug 29, 2024
1 parent 73047e4 commit d87aacd
Showing 4 changed files with 31 additions and 14 deletions.
requirements-base.txt (2 changes: 1 addition & 1 deletion)
@@ -67,7 +67,7 @@ rfc3986-validator>=0.1.1
 sentry-arroyo>=2.16.5
 sentry-kafka-schemas>=0.1.106
 sentry-ophio==0.2.7
-sentry-protos>=0.1.3
+sentry-protos>=0.1.15
 sentry-redis-tools>=0.1.7
 sentry-relay>=0.9.1
 sentry-sdk>=2.12.0
requirements-dev-frozen.txt (2 changes: 1 addition & 1 deletion)
@@ -185,7 +185,7 @@ sentry-forked-django-stubs==5.0.4.post1
 sentry-forked-djangorestframework-stubs==3.15.0.post1
 sentry-kafka-schemas==0.1.106
 sentry-ophio==0.2.7
-sentry-protos==0.1.3
+sentry-protos==0.1.15
 sentry-redis-tools==0.1.7
 sentry-relay==0.9.1
 sentry-sdk==2.12.0
requirements-frozen.txt (2 changes: 1 addition & 1 deletion)
@@ -126,7 +126,7 @@ s3transfer==0.10.0
 sentry-arroyo==2.16.5
 sentry-kafka-schemas==0.1.106
 sentry-ophio==0.2.7
-sentry-protos==0.1.3
+sentry-protos==0.1.15
 sentry-redis-tools==0.1.7
 sentry-relay==0.9.1
 sentry-sdk==2.12.0
src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py (39 changes: 28 additions & 11 deletions)
@@ -10,10 +10,15 @@
     AggregateBucketResponse,
 )
 from sentry_protos.snuba.v1alpha.request_common_pb2 import RequestMeta
+from sentry_protos.snuba.v1alpha.trace_item_attribute_pb2 import (
+    AttributeKey,
+    AttributeKeyTransformContext,
+    AttributeValue,
+)
 from sentry_protos.snuba.v1alpha.trace_item_filter_pb2 import (
     AndFilter,
+    ComparisonFilter,
     OrFilter,
-    StringFilter,
     TraceItemFilter,
 )
 from snuba_sdk import Timeseries
@@ -39,7 +44,12 @@ def parse_mql_filters(group: ConditionGroup) -> Iterable[TraceItemFilter]:
                 )
         elif isinstance(cond, MQLCondition):
             if cond.op == MQLOp.EQ:
-                yield TraceItemFilter(string_filter=StringFilter(key=cond.lhs.name, value=cond.rhs))
+                yield TraceItemFilter(
+                    comparison_filter=ComparisonFilter(
+                        key=AttributeKey(name=cond.lhs.name, type=AttributeKey.Type.TYPE_STRING),
+                        value=AttributeValue(val_str=cond.rhs),
+                    )
+                )
             # TODO: maybe we want to implement other stuff


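For reference, the filter shape produced by the new EQ branch can be exercised on its own. The following is a minimal sketch assuming sentry-protos>=0.1.15 as pinned above; the attribute name and value are illustrative placeholders rather than values taken from this diff.

# Minimal sketch of the new filter shape (assumes sentry-protos>=0.1.15).
# "transaction" / "/checkout" are placeholder attribute name and value.
from sentry_protos.snuba.v1alpha.trace_item_attribute_pb2 import (
    AttributeKey,
    AttributeValue,
)
from sentry_protos.snuba.v1alpha.trace_item_filter_pb2 import (
    AndFilter,
    ComparisonFilter,
    TraceItemFilter,
)

# One MQL equality condition becomes a typed ComparisonFilter instead of the
# old StringFilter.
eq_filter = TraceItemFilter(
    comparison_filter=ComparisonFilter(
        key=AttributeKey(name="transaction", type=AttributeKey.Type.TYPE_STRING),
        value=AttributeValue(val_str="/checkout"),
    )
)

# Parsed conditions are combined with an AndFilter, mirroring make_eap_request
# below.
combined = TraceItemFilter(and_filter=AndFilter(filters=[eq_filter]))
print(combined)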
@@ -69,31 +79,38 @@ def make_eap_request(
     }
 
     rpc_filters = None
-    if ts.filters is not None:
+    if ts.filters is not None and len(ts.filters) > 0:
         rpc_filters = TraceItemFilter(
             and_filter=AndFilter(filters=list(parse_mql_filters(ts.filters)))
         )
-    req = AggregateBucketRequest(
+    aggregate_req = AggregateBucketRequest(
         meta=RequestMeta(
             organization_id=organization.id,
             cogs_category="events_analytics_platform",
             referrer=referrer,
             project_ids=[project.id for project in projects],
+            start_timestamp=start_time_proto,
+            end_timestamp=end_time_proto,
         ),
-        start_timestamp=start_time_proto,
-        end_timestamp=end_time_proto,
         aggregate=aggregate_map[ts.aggregate],
         filter=rpc_filters,
         granularity_secs=interval,
-        metric_name=ts.metric.mri.split("/")[1].split("@")[0],
+        key=AttributeKey(
+            name=ts.metric.mri.split("/")[1].split("@")[0], type=AttributeKey.TYPE_FLOAT
+        ),
+        attribute_key_transform_context=AttributeKeyTransformContext(
+            project_ids_to_names={project.id: project.slug for project in projects}
+        ),
     )
-    http_resp = requests.post(f"{settings.SENTRY_SNUBA}/timeseries", data=req.SerializeToString())
+    http_resp = requests.post(
+        f"{settings.SENTRY_SNUBA}/timeseries", data=aggregate_req.SerializeToString()
+    )
     http_resp.raise_for_status()
 
-    resp = AggregateBucketResponse()
-    resp.ParseFromString(http_resp.content)
+    aggregate_resp = AggregateBucketResponse()
+    aggregate_resp.ParseFromString(http_resp.content)
 
-    series_data = list(resp.result)
+    series_data = list(aggregate_resp.result)
     duration = end - start
     intervals = []
     if len(series_data) > 0:
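The diff above also changes how the request itself is described: the start and end timestamps move onto RequestMeta, the metric is addressed through a typed AttributeKey instead of a bare metric_name, and a project id-to-slug mapping travels in an AttributeKeyTransformContext. The following is a minimal sketch assuming sentry-protos>=0.1.15; the ids, referrer, slug, and metric name are placeholders, and the AggregateBucketRequest wrapper plus the HTTP round trip shown above are omitted.

# Minimal sketch of the new request metadata and typed attribute key
# (assumes sentry-protos>=0.1.15). Ids, referrer, slug, and metric name
# are placeholders; the AggregateBucketRequest wrapper and HTTP call from
# the diff above are omitted.
from datetime import datetime, timedelta, timezone

from google.protobuf.timestamp_pb2 import Timestamp
from sentry_protos.snuba.v1alpha.request_common_pb2 import RequestMeta
from sentry_protos.snuba.v1alpha.trace_item_attribute_pb2 import (
    AttributeKey,
    AttributeKeyTransformContext,
)

end = datetime.now(timezone.utc)
start = end - timedelta(hours=1)

start_time_proto = Timestamp()
start_time_proto.FromDatetime(start)
end_time_proto = Timestamp()
end_time_proto.FromDatetime(end)

# Timestamps now live on RequestMeta rather than on the request body.
meta = RequestMeta(
    organization_id=1,
    cogs_category="events_analytics_platform",
    referrer="example.referrer",
    project_ids=[42],
    start_timestamp=start_time_proto,
    end_timestamp=end_time_proto,
)

# The metric is addressed by a typed AttributeKey whose name would be parsed
# out of an MRI such as "d:custom/duration@millisecond".
key = AttributeKey(name="duration", type=AttributeKey.TYPE_FLOAT)

# Mapping project ids to slugs lets snuba resolve readable project attributes.
transform_ctx = AttributeKeyTransformContext(project_ids_to_names={42: "my-project"})
print(meta, key, transform_ctx)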
