From fc11af17f7558e56a4139d061f47bef0369de0b7 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Fri, 16 Aug 2024 13:29:56 +0000 Subject: [PATCH 01/12] Revert "fix(eap-spans): Make project_id index removal migration a no-op (#6215)" This reverts commit 5155a47cc31c0ad366c2b427db1647e8133ee9ea. Co-authored-by: phacops <336345+phacops@users.noreply.github.com> --- .../0007_drop_project_id_index.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/snuba/snuba_migrations/events_analytics_platform/0007_drop_project_id_index.py b/snuba/snuba_migrations/events_analytics_platform/0007_drop_project_id_index.py index 321cf02ece..57293d5535 100644 --- a/snuba/snuba_migrations/events_analytics_platform/0007_drop_project_id_index.py +++ b/snuba/snuba_migrations/events_analytics_platform/0007_drop_project_id_index.py @@ -1,14 +1,25 @@ from typing import Sequence -from snuba.migrations import migration +from snuba.clusters.storage_sets import StorageSetKey +from snuba.migrations import migration, operations from snuba.migrations.operations import SqlOperation +storage_set_name = StorageSetKey.EVENTS_ANALYTICS_PLATFORM +local_table_name = "eap_spans_local" + class Migration(migration.ClickhouseNodeMigration): blocking = False def forwards_ops(self) -> Sequence[SqlOperation]: - return [] + return [ + operations.DropIndex( + storage_set=StorageSetKey.EVENTS_ANALYTICS_PLATFORM, + table_name="eap_spans_local", + index_name="bf_project_id", + target=operations.OperationTarget.LOCAL, + ), + ] def backwards_ops(self) -> Sequence[SqlOperation]: return [] From 9364a3fc2613cef09bd65d8d0e7ed0780b5b91b8 Mon Sep 17 00:00:00 2001 From: volokluev <3169433+volokluev@users.noreply.github.com> Date: Fri, 16 Aug 2024 09:03:59 -0700 Subject: [PATCH 02/12] feat(eap): barebones functionality for span based metrics endpoint (#6207) putting this here to get some eyes --- Makefile | 8 -- requirements.txt | 1 + snuba/protobufs/AggregateBucket.proto | 27 ---- snuba/protobufs/AggregateBucket_pb2.py | 42 ------ snuba/protobufs/AggregateBucket_pb2.pyi | 88 ------------ snuba/protobufs/BaseRequest.proto | 12 -- snuba/protobufs/BaseRequest_pb2.py | 37 ----- snuba/protobufs/BaseRequest_pb2.pyi | 48 ------- snuba/protobufs/Filters.proto | 49 ------- snuba/protobufs/Filters_pb2.py | 50 ------- snuba/protobufs/Filters_pb2.pyi | 175 ----------------------- snuba/protobufs/FindTrace.proto | 11 -- snuba/protobufs/FindTrace_pb2.py | 39 ----- snuba/protobufs/FindTrace_pb2.pyi | 46 ------ snuba/protobufs/README.md | 7 - snuba/protobufs/__init__.py | 0 snuba/web/rpc/find_traces.py | 85 ----------- snuba/web/rpc/timeseries.py | 181 ++++++++++++++++++++++++ snuba/web/views.py | 19 +-- tests/web/rpc/test_timeseries_api.py | 126 +++++++++++++++-- 20 files changed, 303 insertions(+), 748 deletions(-) delete mode 100644 snuba/protobufs/AggregateBucket.proto delete mode 100644 snuba/protobufs/AggregateBucket_pb2.py delete mode 100644 snuba/protobufs/AggregateBucket_pb2.pyi delete mode 100644 snuba/protobufs/BaseRequest.proto delete mode 100644 snuba/protobufs/BaseRequest_pb2.py delete mode 100644 snuba/protobufs/BaseRequest_pb2.pyi delete mode 100644 snuba/protobufs/Filters.proto delete mode 100644 snuba/protobufs/Filters_pb2.py delete mode 100644 snuba/protobufs/Filters_pb2.pyi delete mode 100644 snuba/protobufs/FindTrace.proto delete mode 100644 snuba/protobufs/FindTrace_pb2.py delete mode 100644 snuba/protobufs/FindTrace_pb2.pyi delete mode 100644 snuba/protobufs/README.md delete mode 100644 snuba/protobufs/__init__.py 
delete mode 100644 snuba/web/rpc/find_traces.py create mode 100644 snuba/web/rpc/timeseries.py diff --git a/Makefile b/Makefile index 396c4cffbe..6f29559f45 100644 --- a/Makefile +++ b/Makefile @@ -64,14 +64,6 @@ install-brew-dev: brew bundle .PHONY: install-brew-dev -# this can go away once sentry formalizes a way of working with protobuf / grpc -protos: - @which protoc || (echo "!!! You need protoc installed in order to build protos. https://grpc.io/docs/protoc-installation/" && exit 1) - @type protoc-gen-mypy || (echo "!!! Failed, run this: pip install mypy-protobuf==3.6.0" && exit 1) - protoc --python_out=. --mypy_out=. $$(find snuba/protobufs -name '*.proto') - -.PHONY: protos - snubadocs: pip install -U -r ./docs-requirements.txt sphinx-build -W -b html docs/source docs/build diff --git a/requirements.txt b/requirements.txt index 5248594b1d..83172dc16c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -45,3 +45,4 @@ sqlparse==0.4.2 google-api-python-client==2.88.0 sentry-usage-accountant==0.0.10 freezegun==1.2.2 +sentry-protos==0.1.3 diff --git a/snuba/protobufs/AggregateBucket.proto b/snuba/protobufs/AggregateBucket.proto deleted file mode 100644 index 7c40f5df3f..0000000000 --- a/snuba/protobufs/AggregateBucket.proto +++ /dev/null @@ -1,27 +0,0 @@ -syntax = "proto3"; - -import "snuba/protobufs/Filters.proto"; -import "snuba/protobufs/BaseRequest.proto"; - -message AggregateBucketRequest { - RequestInfo request_info = 1; - enum Function { - SUM = 0; - AVERAGE = 1; - COUNT = 2; - P50 = 3; - P95 = 4; - P99 = 5; - AVG = 6; - } - - Function aggregate = 3; - TraceItemFilter filter = 4; - uint64 granularity_secs = 5; - - //TODO: group by, topn, etc, not necessary for MVP -} - -message AggregateBucketResponse { - repeated float result = 1; -} diff --git a/snuba/protobufs/AggregateBucket_pb2.py b/snuba/protobufs/AggregateBucket_pb2.py deleted file mode 100644 index 3234d5a566..0000000000 --- a/snuba/protobufs/AggregateBucket_pb2.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: snuba/protobufs/AggregateBucket.proto -# Protobuf Python Version: 5.27.3 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 27, - 3, - '', - 'snuba/protobufs/AggregateBucket.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from snuba.protobufs import Filters_pb2 as snuba_dot_protobufs_dot_Filters__pb2 -from snuba.protobufs import BaseRequest_pb2 as snuba_dot_protobufs_dot_BaseRequest__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%snuba/protobufs/AggregateBucket.proto\x1a\x1dsnuba/protobufs/Filters.proto\x1a!snuba/protobufs/BaseRequest.proto\"\xfe\x01\n\x16\x41ggregateBucketRequest\x12\"\n\x0crequest_info\x18\x01 \x01(\x0b\x32\x0c.RequestInfo\x12\x33\n\taggregate\x18\x03 \x01(\x0e\x32 .AggregateBucketRequest.Function\x12 \n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x10.TraceItemFilter\x12\x18\n\x10granularity_secs\x18\x05 \x01(\x04\"O\n\x08\x46unction\x12\x07\n\x03SUM\x10\x00\x12\x0b\n\x07\x41VERAGE\x10\x01\x12\t\n\x05\x43OUNT\x10\x02\x12\x07\n\x03P50\x10\x03\x12\x07\n\x03P95\x10\x04\x12\x07\n\x03P99\x10\x05\x12\x07\n\x03\x41VG\x10\x06\")\n\x17\x41ggregateBucketResponse\x12\x0e\n\x06result\x18\x01 \x03(\x02\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'snuba.protobufs.AggregateBucket_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_AGGREGATEBUCKETREQUEST']._serialized_start=108 - _globals['_AGGREGATEBUCKETREQUEST']._serialized_end=362 - _globals['_AGGREGATEBUCKETREQUEST_FUNCTION']._serialized_start=283 - _globals['_AGGREGATEBUCKETREQUEST_FUNCTION']._serialized_end=362 - _globals['_AGGREGATEBUCKETRESPONSE']._serialized_start=364 - _globals['_AGGREGATEBUCKETRESPONSE']._serialized_end=405 -# @@protoc_insertion_point(module_scope) diff --git a/snuba/protobufs/AggregateBucket_pb2.pyi b/snuba/protobufs/AggregateBucket_pb2.pyi deleted file mode 100644 index f25f73ac85..0000000000 --- a/snuba/protobufs/AggregateBucket_pb2.pyi +++ /dev/null @@ -1,88 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! 
-isort:skip_file -""" - -import builtins -import collections.abc -import google.protobuf.descriptor -import google.protobuf.internal.containers -import google.protobuf.internal.enum_type_wrapper -import google.protobuf.message -import snuba.protobufs.BaseRequest_pb2 -import snuba.protobufs.Filters_pb2 -import sys -import typing - -if sys.version_info >= (3, 10): - import typing as typing_extensions -else: - import typing_extensions - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -@typing.final -class AggregateBucketRequest(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - class _Function: - ValueType = typing.NewType("ValueType", builtins.int) - V: typing_extensions.TypeAlias = ValueType - - class _FunctionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AggregateBucketRequest._Function.ValueType], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor - SUM: AggregateBucketRequest._Function.ValueType # 0 - AVERAGE: AggregateBucketRequest._Function.ValueType # 1 - COUNT: AggregateBucketRequest._Function.ValueType # 2 - P50: AggregateBucketRequest._Function.ValueType # 3 - P95: AggregateBucketRequest._Function.ValueType # 4 - P99: AggregateBucketRequest._Function.ValueType # 5 - AVG: AggregateBucketRequest._Function.ValueType # 6 - - class Function(_Function, metaclass=_FunctionEnumTypeWrapper): ... - SUM: AggregateBucketRequest.Function.ValueType # 0 - AVERAGE: AggregateBucketRequest.Function.ValueType # 1 - COUNT: AggregateBucketRequest.Function.ValueType # 2 - P50: AggregateBucketRequest.Function.ValueType # 3 - P95: AggregateBucketRequest.Function.ValueType # 4 - P99: AggregateBucketRequest.Function.ValueType # 5 - AVG: AggregateBucketRequest.Function.ValueType # 6 - - REQUEST_INFO_FIELD_NUMBER: builtins.int - AGGREGATE_FIELD_NUMBER: builtins.int - FILTER_FIELD_NUMBER: builtins.int - GRANULARITY_SECS_FIELD_NUMBER: builtins.int - aggregate: global___AggregateBucketRequest.Function.ValueType - granularity_secs: builtins.int - @property - def request_info(self) -> snuba.protobufs.BaseRequest_pb2.RequestInfo: ... - @property - def filter(self) -> snuba.protobufs.Filters_pb2.TraceItemFilter: ... - def __init__( - self, - *, - request_info: snuba.protobufs.BaseRequest_pb2.RequestInfo | None = ..., - aggregate: global___AggregateBucketRequest.Function.ValueType = ..., - filter: snuba.protobufs.Filters_pb2.TraceItemFilter | None = ..., - granularity_secs: builtins.int = ..., - ) -> None: ... - def HasField(self, field_name: typing.Literal["filter", b"filter", "request_info", b"request_info"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["aggregate", b"aggregate", "filter", b"filter", "granularity_secs", b"granularity_secs", "request_info", b"request_info"]) -> None: ... - -global___AggregateBucketRequest = AggregateBucketRequest - -@typing.final -class AggregateBucketResponse(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - RESULT_FIELD_NUMBER: builtins.int - @property - def result(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... - def __init__( - self, - *, - result: collections.abc.Iterable[builtins.float] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing.Literal["result", b"result"]) -> None: ... 
- -global___AggregateBucketResponse = AggregateBucketResponse diff --git a/snuba/protobufs/BaseRequest.proto b/snuba/protobufs/BaseRequest.proto deleted file mode 100644 index 3bd07d9512..0000000000 --- a/snuba/protobufs/BaseRequest.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -import "google/protobuf/timestamp.proto"; - -message RequestInfo { - google.protobuf.Timestamp start_timestamp = 1; - google.protobuf.Timestamp end_timestamp = 2; - uint64 organization_id = 3; - string cogs_category = 4; - string referrer = 5; - repeated uint64 project_ids = 6; -} diff --git a/snuba/protobufs/BaseRequest_pb2.py b/snuba/protobufs/BaseRequest_pb2.py deleted file mode 100644 index b512c4d32a..0000000000 --- a/snuba/protobufs/BaseRequest_pb2.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# NO CHECKED-IN PROTOBUF GENCODE -# source: snuba/protobufs/BaseRequest.proto -# Protobuf Python Version: 5.27.3 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 27, - 3, - '', - 'snuba/protobufs/BaseRequest.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!snuba/protobufs/BaseRequest.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xcc\x01\n\x0bRequestInfo\x12\x33\n\x0fstart_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rend_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x17\n\x0forganization_id\x18\x03 \x01(\x04\x12\x15\n\rcogs_category\x18\x04 \x01(\t\x12\x10\n\x08referrer\x18\x05 \x01(\t\x12\x13\n\x0bproject_ids\x18\x06 \x03(\x04\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'snuba.protobufs.BaseRequest_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_REQUESTINFO']._serialized_start=71 - _globals['_REQUESTINFO']._serialized_end=275 -# @@protoc_insertion_point(module_scope) diff --git a/snuba/protobufs/BaseRequest_pb2.pyi b/snuba/protobufs/BaseRequest_pb2.pyi deleted file mode 100644 index 2fd97d806c..0000000000 --- a/snuba/protobufs/BaseRequest_pb2.pyi +++ /dev/null @@ -1,48 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! 
-isort:skip_file -""" - -import builtins -import collections.abc -import google.protobuf.descriptor -import google.protobuf.internal.containers -import google.protobuf.message -import google.protobuf.timestamp_pb2 -import typing - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -@typing.final -class RequestInfo(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - START_TIMESTAMP_FIELD_NUMBER: builtins.int - END_TIMESTAMP_FIELD_NUMBER: builtins.int - ORGANIZATION_ID_FIELD_NUMBER: builtins.int - COGS_CATEGORY_FIELD_NUMBER: builtins.int - REFERRER_FIELD_NUMBER: builtins.int - PROJECT_IDS_FIELD_NUMBER: builtins.int - organization_id: builtins.int - cogs_category: builtins.str - referrer: builtins.str - @property - def start_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... - @property - def end_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... - @property - def project_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - def __init__( - self, - *, - start_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., - end_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., - organization_id: builtins.int = ..., - cogs_category: builtins.str = ..., - referrer: builtins.str = ..., - project_ids: collections.abc.Iterable[builtins.int] | None = ..., - ) -> None: ... - def HasField(self, field_name: typing.Literal["end_timestamp", b"end_timestamp", "start_timestamp", b"start_timestamp"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["cogs_category", b"cogs_category", "end_timestamp", b"end_timestamp", "organization_id", b"organization_id", "project_ids", b"project_ids", "referrer", b"referrer", "start_timestamp", b"start_timestamp"]) -> None: ... - -global___RequestInfo = RequestInfo diff --git a/snuba/protobufs/Filters.proto b/snuba/protobufs/Filters.proto deleted file mode 100644 index 51a7ad1612..0000000000 --- a/snuba/protobufs/Filters.proto +++ /dev/null @@ -1,49 +0,0 @@ -syntax = "proto3"; - -message AndFilter { - repeated TraceItemFilter filters = 1; -} - -message OrFilter { - repeated TraceItemFilter filters = 1; -} - -message NumericalFilter { - enum Op { - LESS_THAN = 0; - GREATER_THAN = 1; - LESS_THAN_OR_EQUALS = 2; - GREATER_THAN_OR_EQUALS = 3; - EQUALS = 4; - NOT_EQUALS = 5; - } - string key = 1; - Op op = 2; - float value = 3; -} - -message StringFilter { - enum Op { - EQUALS = 0; - NOT_EQUALS = 1; - LIKE = 2; - NOT_LIKE = 3; - } - string key = 1; - Op op = 2; - string value = 3; -} - -message ExistsFilter { - string key = 1; -} - -message TraceItemFilter { - oneof value { - AndFilter and = 1; - OrFilter or = 2; - NumericalFilter number_comparison = 3; - StringFilter string_comparison = 4; - ExistsFilter exists = 5; - } -} diff --git a/snuba/protobufs/Filters_pb2.py b/snuba/protobufs/Filters_pb2.py deleted file mode 100644 index d0a087b963..0000000000 --- a/snuba/protobufs/Filters_pb2.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: snuba/protobufs/Filters.proto -# Protobuf Python Version: 5.27.3 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 27, - 3, - '', - 'snuba/protobufs/Filters.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dsnuba/protobufs/Filters.proto\".\n\tAndFilter\x12!\n\x07\x66ilters\x18\x01 \x03(\x0b\x32\x10.TraceItemFilter\"-\n\x08OrFilter\x12!\n\x07\x66ilters\x18\x01 \x03(\x0b\x32\x10.TraceItemFilter\"\xc6\x01\n\x0fNumericalFilter\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1f\n\x02op\x18\x02 \x01(\x0e\x32\x13.NumericalFilter.Op\x12\r\n\x05value\x18\x03 \x01(\x02\"v\n\x02Op\x12\r\n\tLESS_THAN\x10\x00\x12\x10\n\x0cGREATER_THAN\x10\x01\x12\x17\n\x13LESS_THAN_OR_EQUALS\x10\x02\x12\x1a\n\x16GREATER_THAN_OR_EQUALS\x10\x03\x12\n\n\x06\x45QUALS\x10\x04\x12\x0e\n\nNOT_EQUALS\x10\x05\"\x82\x01\n\x0cStringFilter\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x02op\x18\x02 \x01(\x0e\x32\x10.StringFilter.Op\x12\r\n\x05value\x18\x03 \x01(\t\"8\n\x02Op\x12\n\n\x06\x45QUALS\x10\x00\x12\x0e\n\nNOT_EQUALS\x10\x01\x12\x08\n\x04LIKE\x10\x02\x12\x0c\n\x08NOT_LIKE\x10\x03\"\x1b\n\x0c\x45xistsFilter\x12\x0b\n\x03key\x18\x01 \x01(\t\"\xca\x01\n\x0fTraceItemFilter\x12\x19\n\x03\x61nd\x18\x01 \x01(\x0b\x32\n.AndFilterH\x00\x12\x17\n\x02or\x18\x02 \x01(\x0b\x32\t.OrFilterH\x00\x12-\n\x11number_comparison\x18\x03 \x01(\x0b\x32\x10.NumericalFilterH\x00\x12*\n\x11string_comparison\x18\x04 \x01(\x0b\x32\r.StringFilterH\x00\x12\x1f\n\x06\x65xists\x18\x05 \x01(\x0b\x32\r.ExistsFilterH\x00\x42\x07\n\x05valueb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'snuba.protobufs.Filters_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_ANDFILTER']._serialized_start=33 - _globals['_ANDFILTER']._serialized_end=79 - _globals['_ORFILTER']._serialized_start=81 - _globals['_ORFILTER']._serialized_end=126 - _globals['_NUMERICALFILTER']._serialized_start=129 - _globals['_NUMERICALFILTER']._serialized_end=327 - _globals['_NUMERICALFILTER_OP']._serialized_start=209 - _globals['_NUMERICALFILTER_OP']._serialized_end=327 - _globals['_STRINGFILTER']._serialized_start=330 - _globals['_STRINGFILTER']._serialized_end=460 - _globals['_STRINGFILTER_OP']._serialized_start=404 - _globals['_STRINGFILTER_OP']._serialized_end=460 - _globals['_EXISTSFILTER']._serialized_start=462 - _globals['_EXISTSFILTER']._serialized_end=489 - _globals['_TRACEITEMFILTER']._serialized_start=492 - _globals['_TRACEITEMFILTER']._serialized_end=694 -# @@protoc_insertion_point(module_scope) diff --git a/snuba/protobufs/Filters_pb2.pyi b/snuba/protobufs/Filters_pb2.pyi deleted file mode 100644 index 520e70d223..0000000000 --- a/snuba/protobufs/Filters_pb2.pyi +++ /dev/null @@ -1,175 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! 
-isort:skip_file -""" - -import builtins -import collections.abc -import google.protobuf.descriptor -import google.protobuf.internal.containers -import google.protobuf.internal.enum_type_wrapper -import google.protobuf.message -import sys -import typing - -if sys.version_info >= (3, 10): - import typing as typing_extensions -else: - import typing_extensions - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -@typing.final -class AndFilter(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - FILTERS_FIELD_NUMBER: builtins.int - @property - def filters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TraceItemFilter]: ... - def __init__( - self, - *, - filters: collections.abc.Iterable[global___TraceItemFilter] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing.Literal["filters", b"filters"]) -> None: ... - -global___AndFilter = AndFilter - -@typing.final -class OrFilter(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - FILTERS_FIELD_NUMBER: builtins.int - @property - def filters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TraceItemFilter]: ... - def __init__( - self, - *, - filters: collections.abc.Iterable[global___TraceItemFilter] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing.Literal["filters", b"filters"]) -> None: ... - -global___OrFilter = OrFilter - -@typing.final -class NumericalFilter(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - class _Op: - ValueType = typing.NewType("ValueType", builtins.int) - V: typing_extensions.TypeAlias = ValueType - - class _OpEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[NumericalFilter._Op.ValueType], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor - LESS_THAN: NumericalFilter._Op.ValueType # 0 - GREATER_THAN: NumericalFilter._Op.ValueType # 1 - LESS_THAN_OR_EQUALS: NumericalFilter._Op.ValueType # 2 - GREATER_THAN_OR_EQUALS: NumericalFilter._Op.ValueType # 3 - EQUALS: NumericalFilter._Op.ValueType # 4 - NOT_EQUALS: NumericalFilter._Op.ValueType # 5 - - class Op(_Op, metaclass=_OpEnumTypeWrapper): ... - LESS_THAN: NumericalFilter.Op.ValueType # 0 - GREATER_THAN: NumericalFilter.Op.ValueType # 1 - LESS_THAN_OR_EQUALS: NumericalFilter.Op.ValueType # 2 - GREATER_THAN_OR_EQUALS: NumericalFilter.Op.ValueType # 3 - EQUALS: NumericalFilter.Op.ValueType # 4 - NOT_EQUALS: NumericalFilter.Op.ValueType # 5 - - KEY_FIELD_NUMBER: builtins.int - OP_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - key: builtins.str - op: global___NumericalFilter.Op.ValueType - value: builtins.float - def __init__( - self, - *, - key: builtins.str = ..., - op: global___NumericalFilter.Op.ValueType = ..., - value: builtins.float = ..., - ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "op", b"op", "value", b"value"]) -> None: ... 
- -global___NumericalFilter = NumericalFilter - -@typing.final -class StringFilter(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - class _Op: - ValueType = typing.NewType("ValueType", builtins.int) - V: typing_extensions.TypeAlias = ValueType - - class _OpEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[StringFilter._Op.ValueType], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor - EQUALS: StringFilter._Op.ValueType # 0 - NOT_EQUALS: StringFilter._Op.ValueType # 1 - LIKE: StringFilter._Op.ValueType # 2 - NOT_LIKE: StringFilter._Op.ValueType # 3 - - class Op(_Op, metaclass=_OpEnumTypeWrapper): ... - EQUALS: StringFilter.Op.ValueType # 0 - NOT_EQUALS: StringFilter.Op.ValueType # 1 - LIKE: StringFilter.Op.ValueType # 2 - NOT_LIKE: StringFilter.Op.ValueType # 3 - - KEY_FIELD_NUMBER: builtins.int - OP_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - key: builtins.str - op: global___StringFilter.Op.ValueType - value: builtins.str - def __init__( - self, - *, - key: builtins.str = ..., - op: global___StringFilter.Op.ValueType = ..., - value: builtins.str = ..., - ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "op", b"op", "value", b"value"]) -> None: ... - -global___StringFilter = StringFilter - -@typing.final -class ExistsFilter(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - KEY_FIELD_NUMBER: builtins.int - key: builtins.str - def __init__( - self, - *, - key: builtins.str = ..., - ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key"]) -> None: ... - -global___ExistsFilter = ExistsFilter - -@typing.final -class TraceItemFilter(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - AND_FIELD_NUMBER: builtins.int - OR_FIELD_NUMBER: builtins.int - NUMBER_COMPARISON_FIELD_NUMBER: builtins.int - STRING_COMPARISON_FIELD_NUMBER: builtins.int - EXISTS_FIELD_NUMBER: builtins.int - @property - def number_comparison(self) -> global___NumericalFilter: ... - @property - def string_comparison(self) -> global___StringFilter: ... - @property - def exists(self) -> global___ExistsFilter: ... - def __init__( - self, - *, - number_comparison: global___NumericalFilter | None = ..., - string_comparison: global___StringFilter | None = ..., - exists: global___ExistsFilter | None = ..., - ) -> None: ... - def HasField(self, field_name: typing.Literal["and", b"and", "exists", b"exists", "number_comparison", b"number_comparison", "or", b"or", "string_comparison", b"string_comparison", "value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["and", b"and", "exists", b"exists", "number_comparison", b"number_comparison", "or", b"or", "string_comparison", b"string_comparison", "value", b"value"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["value", b"value"]) -> typing.Literal["and", "or", "number_comparison", "string_comparison", "exists"] | None: ... 
- -global___TraceItemFilter = TraceItemFilter diff --git a/snuba/protobufs/FindTrace.proto b/snuba/protobufs/FindTrace.proto deleted file mode 100644 index 1ad1e5a23e..0000000000 --- a/snuba/protobufs/FindTrace.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -import "snuba/protobufs/Filters.proto"; - -message FindTraceRequest { - repeated TraceItemFilter filters = 1; -} - -message FindTraceResponse { - repeated string trace_uuids = 1; -} diff --git a/snuba/protobufs/FindTrace_pb2.py b/snuba/protobufs/FindTrace_pb2.py deleted file mode 100644 index 5c4ace2cd0..0000000000 --- a/snuba/protobufs/FindTrace_pb2.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# NO CHECKED-IN PROTOBUF GENCODE -# source: snuba/protobufs/FindTrace.proto -# Protobuf Python Version: 5.27.3 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 27, - 3, - '', - 'snuba/protobufs/FindTrace.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from snuba.protobufs import Filters_pb2 as snuba_dot_protobufs_dot_Filters__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fsnuba/protobufs/FindTrace.proto\x1a\x1dsnuba/protobufs/Filters.proto\"5\n\x10\x46indTraceRequest\x12!\n\x07\x66ilters\x18\x01 \x03(\x0b\x32\x10.TraceItemFilter\"(\n\x11\x46indTraceResponse\x12\x13\n\x0btrace_uuids\x18\x01 \x03(\tb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'snuba.protobufs.FindTrace_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_FINDTRACEREQUEST']._serialized_start=66 - _globals['_FINDTRACEREQUEST']._serialized_end=119 - _globals['_FINDTRACERESPONSE']._serialized_start=121 - _globals['_FINDTRACERESPONSE']._serialized_end=161 -# @@protoc_insertion_point(module_scope) diff --git a/snuba/protobufs/FindTrace_pb2.pyi b/snuba/protobufs/FindTrace_pb2.pyi deleted file mode 100644 index 7471f565dd..0000000000 --- a/snuba/protobufs/FindTrace_pb2.pyi +++ /dev/null @@ -1,46 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! -isort:skip_file -""" - -import builtins -import collections.abc -import google.protobuf.descriptor -import google.protobuf.internal.containers -import google.protobuf.message -import snuba.protobufs.Filters_pb2 -import typing - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -@typing.final -class FindTraceRequest(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - FILTERS_FIELD_NUMBER: builtins.int - @property - def filters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[snuba.protobufs.Filters_pb2.TraceItemFilter]: ... - def __init__( - self, - *, - filters: collections.abc.Iterable[snuba.protobufs.Filters_pb2.TraceItemFilter] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing.Literal["filters", b"filters"]) -> None: ... 
- -global___FindTraceRequest = FindTraceRequest - -@typing.final -class FindTraceResponse(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - TRACE_UUIDS_FIELD_NUMBER: builtins.int - @property - def trace_uuids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... - def __init__( - self, - *, - trace_uuids: collections.abc.Iterable[builtins.str] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing.Literal["trace_uuids", b"trace_uuids"]) -> None: ... - -global___FindTraceResponse = FindTraceResponse diff --git a/snuba/protobufs/README.md b/snuba/protobufs/README.md deleted file mode 100644 index 8b2a1f6a86..0000000000 --- a/snuba/protobufs/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# To build -``` -# once -brew install protobuf -. .venv/bin/activate -make protos -``` diff --git a/snuba/protobufs/__init__.py b/snuba/protobufs/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/snuba/web/rpc/find_traces.py b/snuba/web/rpc/find_traces.py deleted file mode 100644 index b07b57d9a8..0000000000 --- a/snuba/web/rpc/find_traces.py +++ /dev/null @@ -1,85 +0,0 @@ -import asyncio -from typing import List - -from snuba.clickhouse.formatter.nodes import FormattedQuery, StringNode -from snuba.datasets.storages.factory import get_storage -from snuba.datasets.storages.storage_key import StorageKey -from snuba.protobufs.Filters_pb2 import NumericalFilter, StringFilter, TraceItemFilter -from snuba.protobufs.FindTrace_pb2 import FindTraceRequest, FindTraceResponse -from snuba.reader import Reader -from snuba.utils import constants -from snuba.utils.hashes import fnv_1a -from snuba.utils.metrics.timer import Timer - - -async def _find_traces_matching_filter( - reader: Reader, filt: TraceItemFilter -) -> set[str]: - cond = "" - - # TODO this is just a toy example, sql injection etc - if filt.exists: - k = filt.exists.key - bucket_idx = fnv_1a(k.encode("utf-8")) % constants.ATTRIBUTE_BUCKETS - cond = f"mapContains(attr_str_{bucket_idx}, '{k}') OR mapContains(attr_num_{bucket_idx}, '{k}')" - elif filt.string_comparison: - k = filt.string_comparison.key - op = filt.string_comparison.op - v = filt.string_comparison.value - bucket_idx = fnv_1a(k.encode("utf-8")) % constants.ATTRIBUTE_BUCKETS - - str_op_map = { - StringFilter.EQUALS: "=", - StringFilter.NOT_EQUALS: "<>", - StringFilter.LIKE: " LIKE ", - StringFilter.NOT_LIKE: " NOT LIKE ", - } - - cond = f"attr_str_{bucket_idx}['{k}']{str_op_map[op]}'{v}'" - elif filt.number_comparison: - k = filt.number_comparison.key - num_op = filt.number_comparison.op - num_v = filt.number_comparison.value - bucket_idx = fnv_1a(k.encode("utf-8")) % constants.ATTRIBUTE_BUCKETS - - num_op_map = { - NumericalFilter.EQUALS: "=", # TODO: float equality is finnicky, we might want to do |a-b|", - NumericalFilter.LESS_THAN: "<", - NumericalFilter.LESS_THAN_OR_EQUALS: "<=", - NumericalFilter.GREATER_THAN: ">", - NumericalFilter.GREATER_THAN_OR_EQUALS: ">=", - } - cond = f"attr_num_{bucket_idx}['{k}']{num_op_map[num_op]}{num_v}" - - query = f""" -SELECT trace_id -FROM eap_spans_local -WHERE ({cond}) - """ - res = reader.execute( - FormattedQuery([StringNode(query)]), - robust=True, - ) - - return set(x["trace_id"] for x in res["data"]) - - -async def find_traces(req: FindTraceRequest, timer: Timer) -> FindTraceResponse: - return FindTraceResponse() - if len(req.filters) == 0: - return FindTraceResponse() - - if len(req.filters) > 5: - return FindTraceResponse() # todo: 
better error handling

-    storage = get_storage(StorageKey("eap_spans"))
-    reader = storage.get_cluster().get_reader()
-
-    results: List[set[str]] = await asyncio.gather(
-        *(_find_traces_matching_filter(reader, filt) for filt in req.filters)
-    )
-
-    uuids = set.intersection(*results)
-
-    return FindTraceResponse(trace_uuids=uuids)
diff --git a/snuba/web/rpc/timeseries.py b/snuba/web/rpc/timeseries.py
new file mode 100644
index 0000000000..b71bea920b
--- /dev/null
+++ b/snuba/web/rpc/timeseries.py
@@ -0,0 +1,181 @@
+import uuid
+from datetime import datetime
+
+from google.protobuf.json_format import MessageToDict
+from sentry_protos.snuba.v1alpha.endpoint_aggregate_bucket_pb2 import (
+    AggregateBucketRequest,
+    AggregateBucketResponse,
+)
+
+from snuba.attribution.appid import AppID
+from snuba.attribution.attribution_info import AttributionInfo
+from snuba.datasets.entities.entity_key import EntityKey
+from snuba.datasets.entities.factory import get_entity
+from snuba.datasets.pluggable_dataset import PluggableDataset
+from snuba.query import SelectedExpression
+from snuba.query.conditions import combine_and_conditions, combine_or_conditions
+from snuba.query.data_source.simple import Entity
+from snuba.query.dsl import Functions as f
+from snuba.query.dsl import (
+    NestedColumn,
+    and_cond,
+    column,
+    in_cond,
+    literal,
+    literals_array,
+)
+from snuba.query.expressions import Expression, FunctionCall
+from snuba.query.logical import Query
+from snuba.query.query_settings import HTTPQuerySettings
+from snuba.request import Request as SnubaRequest
+from snuba.utils.metrics.timer import Timer
+from snuba.web.query import run_query
+
+_HARDCODED_MEASUREMENT_NAME = "eap.measurement"
+
+
+def _get_measurement_field(
+    request: AggregateBucketRequest,
+) -> Expression:
+    field = NestedColumn("attr_num")
+    # HACK
+    return field[request.metric_name]
+
+
+def _treeify_or_and_conditions(query: Query) -> None:
+    """
+    Look for expressions like or(a, b, c) and turn them into or(a, or(b, c)),
+    and and(a, b, c) into and(a, and(b, c)).
+
+    Even though ClickHouse SQL supports an arbitrary number of arguments, there
+    are other parts of the codebase which assume `or` and `and` have exactly
+    two arguments.
+
+    Adding this post-process step is easier than changing the rest of the query
+    pipeline.
+
+    Note: this does not apply to the conditions of a from_clause subquery (the
+    nested one) because transform_expressions is not implemented for composite
+    queries.
+    """
+
+    def transform(exp: Expression) -> Expression:
+        if not isinstance(exp, FunctionCall):
+            return exp
+
+        if exp.function_name == "and":
+            return combine_and_conditions(exp.parameters)
+        elif exp.function_name == "or":
+            return combine_or_conditions(exp.parameters)
+        else:
+            return exp
+
+    query.transform_expressions(transform)
+
+
+def _get_aggregate_func(
+    request: AggregateBucketRequest,
+) -> Expression:
+    FuncEnum = AggregateBucketRequest.Function
+    lookup = {
+        FuncEnum.FUNCTION_SUM: f.sum(
+            _get_measurement_field(request), alias="measurement"
+        ),
+        FuncEnum.FUNCTION_AVERAGE: f.avg(
+            _get_measurement_field(request), alias="measurement"
+        ),
+        FuncEnum.FUNCTION_COUNT: f.count(
+            _get_measurement_field(request), alias="measurement"
+        ),
+        # curried functions are a pain, to do later
+        FuncEnum.FUNCTION_P50: None,
+        FuncEnum.FUNCTION_P95: None,
+        FuncEnum.FUNCTION_P99: None,
+    }
+    res = lookup.get(request.aggregate, None)
+    if res is None:
+        raise NotImplementedError()
+    return res  # type: ignore
+
+
+def _build_condition(request: AggregateBucketRequest)
-> Expression: + project_ids = in_cond( + column("project_id"), + literals_array( + alias=None, + literals=[literal(pid) for pid in request.meta.project_ids], + ), + ) + + return and_cond( + project_ids, + f.equals(column("organization_id"), request.meta.organization_id), + # HACK: timestamp name + f.less( + column("start_timestamp"), + f.toDateTime( + datetime.utcfromtimestamp(request.end_timestamp.seconds).isoformat() + ), + ), + f.greaterOrEquals( + column("start_timestamp"), + f.toDateTime( + datetime.utcfromtimestamp(request.start_timestamp.seconds).isoformat() + ), + ), + ) + + +def _build_query(request: AggregateBucketRequest) -> Query: + entity = Entity( + key=EntityKey("eap_spans"), + schema=get_entity(EntityKey("eap_spans")).get_data_model(), + sample=None, + ) + + res = Query( + from_clause=entity, + selected_columns=[ + SelectedExpression(name="time", expression=column("time", alias="time")), + SelectedExpression(name="agg", expression=_get_aggregate_func(request)), + ], + condition=_build_condition(request), + granularity=request.granularity_secs, + groupby=[column("time")], + ) + _treeify_or_and_conditions(res) + return res + + +def _build_snuba_request( + request: AggregateBucketRequest, +) -> SnubaRequest: + + return SnubaRequest( + id=str(uuid.uuid4()), + original_body=MessageToDict(request), + query=_build_query(request), + query_settings=HTTPQuerySettings(), + attribution_info=AttributionInfo( + referrer=request.meta.referrer, + team="eap", + feature="eap", + tenant_ids={ + "organization_id": request.meta.organization_id, + "referrer": request.meta.referrer, + }, + app_id=AppID("eap"), + parent_api="eap_timeseries", + ), + ) + + +def timeseries_query( + request: AggregateBucketRequest, timer: Timer | None = None +) -> AggregateBucketResponse: + timer = timer or Timer("timeseries_query") + snuba_request = _build_snuba_request(request) + res = run_query( + dataset=PluggableDataset(name="eap", all_entities=[]), + request=snuba_request, + timer=timer, + ) + assert res.result.get("data", None) is not None + return AggregateBucketResponse(result=[float(r["agg"]) for r in res.result["data"]]) diff --git a/snuba/web/views.py b/snuba/web/views.py index f0760421e9..2d651f77c3 100644 --- a/snuba/web/views.py +++ b/snuba/web/views.py @@ -35,6 +35,9 @@ render_template, ) from flask import request as http_request +from sentry_protos.snuba.v1alpha.endpoint_aggregate_bucket_pb2 import ( + AggregateBucketRequest, +) from werkzeug import Response as WerkzeugResponse from werkzeug.exceptions import InternalServerError @@ -51,7 +54,6 @@ from snuba.datasets.factory import InvalidDatasetError, get_dataset_name from snuba.datasets.schemas.tables import TableSchema from snuba.datasets.storage import StorageNotAvailable, WritableTableStorage -from snuba.protobufs import AggregateBucket_pb2, FindTrace_pb2 from snuba.query.allocation_policies import AllocationPolicyViolations from snuba.query.exceptions import InvalidQueryException, QueryPlanException from snuba.query.query_settings import HTTPQuerySettings @@ -76,6 +78,7 @@ from snuba.web.converters import DatasetConverter, EntityConverter, StorageConverter from snuba.web.delete_query import DeletesNotEnabledError, delete_from_storage from snuba.web.query import parse_and_run_query +from snuba.web.rpc.timeseries import timeseries_query as timeseries_query_impl from snuba.writer import BatchWriterEncoderWrapper, WriterTableRow logger = logging.getLogger("snuba.api") @@ -271,23 +274,13 @@ def unqualified_query_view(*, timer: Timer) -> 
Union[Response, str, WerkzeugResp assert False, "unexpected fallthrough" -@application.route("/find_trace", methods=["POST"]) -@util.time_request("query") -def find_trace_endpoint(*, timer: Timer) -> Union[Response, str, WerkzeugResponse]: - req = FindTrace_pb2.FindTraceRequest() - req.ParseFromString(http_request.data) - return "" # TODO this endpoint is not ready for primetime - - @application.route("/timeseries", methods=["POST"]) @util.time_request("timeseries_query") def timeseries_query(*, timer: Timer) -> Response: - req = AggregateBucket_pb2.AggregateBucketRequest() + req = AggregateBucketRequest() req.ParseFromString(http_request.data) # STUB - res = AggregateBucket_pb2.AggregateBucketResponse( - result=[float(i) for i in range(100)] - ) + res = timeseries_query_impl(req, timer) return Response(res.SerializeToString()) diff --git a/tests/web/rpc/test_timeseries_api.py b/tests/web/rpc/test_timeseries_api.py index 6352a61ae8..d6c9f63d83 100644 --- a/tests/web/rpc/test_timeseries_api.py +++ b/tests/web/rpc/test_timeseries_api.py @@ -1,9 +1,99 @@ +import random +import uuid +from datetime import datetime, timedelta +from typing import Any, Mapping + import pytest from google.protobuf.timestamp_pb2 import Timestamp +from sentry_protos.snuba.v1alpha.endpoint_aggregate_bucket_pb2 import ( + AggregateBucketRequest, +) +from sentry_protos.snuba.v1alpha.request_common_pb2 import RequestMeta -from snuba.protobufs import AggregateBucket_pb2 -from snuba.protobufs.BaseRequest_pb2 import RequestInfo +from snuba.datasets.storages.factory import get_storage +from snuba.datasets.storages.storage_key import StorageKey +from snuba.web.rpc.timeseries import timeseries_query from tests.base import BaseApiTest +from tests.helpers import write_raw_unprocessed_events + + +def gen_message(dt: datetime) -> Mapping[str, Any]: + return { + "description": "/api/0/relays/projectconfigs/", + "duration_ms": 152, + "event_id": "d826225de75d42d6b2f01b957d51f18f", + "exclusive_time_ms": 0.228, + "is_segment": True, + "data": { + "sentry.environment": "development", + "sentry.release": "backend@24.7.0.dev0+c45b49caed1e5fcbf70097ab3f434b487c359b6b", + "thread.name": "uWSGIWorker1Core0", + "thread.id": "8522009600", + "sentry.segment.name": "/api/0/relays/projectconfigs/", + "sentry.sdk.name": "sentry.python.django", + "sentry.sdk.version": "2.7.0", + "my.float.field": 101.2, + "my.int.field": 2000, + "my.neg.field": -100, + "my.neg.float.field": -101.2, + "my.true.bool.field": True, + "my.false.bool.field": False, + }, + "measurements": { + "num_of_spans": {"value": 50.0}, + "eap.measurement": {"value": 420}, + }, + "organization_id": 1, + "origin": "auto.http.django", + "project_id": 1, + "received": 1721319572.877828, + "retention_days": 90, + "segment_id": "8873a98879faf06d", + "sentry_tags": { + "category": "http", + "environment": "development", + "op": "http.server", + "platform": "python", + "release": "backend@24.7.0.dev0+c45b49caed1e5fcbf70097ab3f434b487c359b6b", + "sdk.name": "sentry.python.django", + "sdk.version": "2.7.0", + "status": "ok", + "status_code": "200", + "thread.id": "8522009600", + "thread.name": "uWSGIWorker1Core0", + "trace.status": "ok", + "transaction": "/api/0/relays/projectconfigs/", + "transaction.method": "POST", + "transaction.op": "http.server", + "user": "ip:127.0.0.1", + }, + "span_id": uuid.uuid4().hex, + "tags": { + "http.status_code": "200", + "relay_endpoint_version": "3", + "relay_id": "88888888-4444-4444-8444-cccccccccccc", + "relay_no_cache": "False", + 
"relay_protocol_version": "3", + "relay_use_post_or_schedule": "True", + "relay_use_post_or_schedule_rejected": "version", + "server_name": "D23CXQ4GK2.local", + "spans_over_limit": "False", + "color": random.choice(["red", "green", "blue"]), + "location": random.choice(["mobile", "frontend", "backend"]), + }, + "trace_id": uuid.uuid4().hex, + "start_timestamp_ms": int(dt.timestamp()) * 1000 - int(random.gauss(1000, 200)), + "start_timestamp_precise": dt.timestamp(), + "end_timestamp_precise": dt.timestamp() + 1, + } + + +@pytest.fixture(autouse=True) +def setup_teardown(clickhouse_db: None, redis_db: None) -> None: + spans_storage = get_storage(StorageKey("eap_spans")) + start = datetime.utcnow() - timedelta(hours=1) + messages = [gen_message(start + timedelta(minutes=i)) for i in range(60)] + write_raw_unprocessed_events(spans_storage, messages) # type: ignore @pytest.mark.clickhouse_db @@ -12,22 +102,36 @@ class TestTimeSeriesApi(BaseApiTest): def test_basic(self) -> None: ts = Timestamp() ts.GetCurrentTime() - message = AggregateBucket_pb2.AggregateBucketRequest( - request_info=RequestInfo( + message = AggregateBucketRequest( + meta=RequestMeta( project_ids=[1, 2, 3], organization_id=1, cogs_category="something", referrer="something", - start_timestamp=ts, - end_timestamp=ts, ), + aggregate=AggregateBucketRequest.FUNCTION_SUM, + start_timestamp=ts, + end_timestamp=ts, granularity_secs=60, ) response = self.app.post("/timeseries", data=message.SerializeToString()) assert response.status_code == 200 - # STUB response test - pbuf_response = AggregateBucket_pb2.AggregateBucketResponse() - pbuf_response.ParseFromString(response.data) - - assert pbuf_response.result == [float(i) for i in range(100)] + def test_with_data(self, setup_teardown: Any) -> None: + ts = Timestamp(seconds=int(datetime.utcnow().timestamp())) + hour_ago = int((datetime.utcnow() - timedelta(hours=1)).timestamp()) + message = AggregateBucketRequest( + meta=RequestMeta( + project_ids=[1, 2, 3], + organization_id=1, + cogs_category="something", + referrer="something", + ), + start_timestamp=Timestamp(seconds=hour_ago), + end_timestamp=ts, + metric_name="eap.measurement", + aggregate=AggregateBucketRequest.FUNCTION_AVERAGE, + granularity_secs=1, + ) + response = timeseries_query(message) + assert response.result == [420 for _ in range(60)] From e56b1e5f39cc06785080ab2db15904a1951b6067 Mon Sep 17 00:00:00 2001 From: Enoch Tang Date: Fri, 16 Aug 2024 14:13:38 -0400 Subject: [PATCH 03/12] bug(mql): Fix WITH TOTALS for MQL queries (#6218) ### Overview This PR is responsible for fixing a bug with the `totals` flag in MQL. When the `totals` flag is set to true, snuba will insert a `WITH TOTALS` modifier next to the `GROUP BY` in the query. Read more about WITH TOTALS [here](https://clickhouse.com/docs/en/sql-reference/statements/select/group-by#with-totals-modifier). However, if the query does not contain a `GROUP BY` then the modifier will not be added to the query. As a result, the flag is ignored and CH does not return a totals row. When this happens, the query AST still has `totals` set to true, and during [result transformation](https://github.com/getsentry/snuba/blob/fc11af17f7558e56a4139d061f47bef0369de0b7/snuba/clickhouse/native.py#L472), snuba pops the last element of the data array and assumes it is the totals row (even though it isn't). To fix this, in MQL parsing, we should only set and respect the totals flag only if there exists a group by. 
Otherwise, `WITH TOTALS` cannot be applied anyway, and therefore we should not set the flag.
---
 snuba/query/mql/context_population.py        |  6 --
 snuba/query/mql/parser_supported_join.py     |  7 +-
 tests/query/parser/test_formula_mql_query.py | 90 +++++++++++++++++++-
 tests/query/parser/test_parser.py            | 12 +--
 tests/test_metrics_mql_api.py                |  6 +-
 5 files changed, 102 insertions(+), 19 deletions(-)

diff --git a/snuba/query/mql/context_population.py b/snuba/query/mql/context_population.py
index 9e9eb1a316..209b9a393c 100644
--- a/snuba/query/mql/context_population.py
+++ b/snuba/query/mql/context_population.py
@@ -114,12 +114,6 @@ def rollup_expressions(
         Literal(None, rollup.granularity),
     )

-    # Validate totals/interval
-    if rollup.interval is None and rollup.with_totals in (None, "False"):
-        raise ParsingException(
-            "either interval or with_totals must be specified in rollup"
-        )
-
     # Validate totals/orderby
     if rollup.with_totals is not None and rollup.with_totals not in ("True", "False"):
         raise ParsingException("with_totals must be a string, either 'True' or 'False'")
diff --git a/snuba/query/mql/parser_supported_join.py b/snuba/query/mql/parser_supported_join.py
index c4d6ab4025..4d3b65806a 100644
--- a/snuba/query/mql/parser_supported_join.py
+++ b/snuba/query/mql/parser_supported_join.py
@@ -1301,7 +1301,6 @@ def populate_query_from_mql_context(
     )
     query.add_condition_to_ast(context_condition)

-    query.set_totals(with_totals)
     if orderby:
         query.set_ast_orderby([orderby])

@@ -1315,6 +1314,10 @@
         else:
             query.set_ast_groupby([selected_time.expression])

+    if query.get_groupby():
+        # Only set WITH TOTALS if there is a group by.
+        query.set_totals(with_totals)
+
     if isinstance(query, CompositeQuery):

         def add_time_join_keys(join_clause: JoinClause[Any]) -> str:
@@ -1386,7 +1389,7 @@ def convert_to_cross_join(join_clause: JoinClause[Any]) -> JoinClause[Any]:
         # ensure we correctly join the subqueries. The column names will be the same for all the
         # subqueries, so we just need to map all the table aliases.
         add_time_join_keys(join_clause)
-    elif query.has_totals() and no_groupby_or_one_sided_groupby:
+    elif with_totals and no_groupby_or_one_sided_groupby:
         # If formula query has no interval and no group by or a onesided groupby, but has totals, we need to convert
         # join type to a CROSS join. This is because without a group by, each sub-query will return
         # a single row with single value column.
In order to combine the results in the outer query, diff --git a/tests/query/parser/test_formula_mql_query.py b/tests/query/parser/test_formula_mql_query.py index f62f4fa142..9fbf6906f8 100644 --- a/tests/query/parser/test_formula_mql_query.py +++ b/tests/query/parser/test_formula_mql_query.py @@ -877,6 +877,94 @@ def test_groupby() -> None: assert eq, reason +def test_groupby_with_totals() -> None: + mql_context_new = deepcopy(mql_context) + mql_context_new["rollup"]["with_totals"] = "True" + mql_context_new["rollup"]["interval"] = None + query_body = "sum(`d:transactions/duration@millisecond`){status_code:200} by transaction / sum(`d:transactions/duration@millisecond`) by transaction" + + expected_selected = SelectedExpression( + "aggregate_value", + divide( + FunctionCall( + None, + "sum", + (Column("_snuba_value", "d0", "value"),), + ), + FunctionCall( + None, + "sum", + (Column("_snuba_value", "d1", "value"),), + ), + "_snuba_aggregate_value", + ), + ) + + join_clause = JoinClause( + left_node=IndividualNode( + alias="d1", + data_source=from_distributions, + ), + right_node=IndividualNode( + alias="d0", + data_source=from_distributions, + ), + keys=[ + JoinCondition( + left=JoinConditionExpression( + table_alias="d1", column="tags_raw[333333]" + ), + right=JoinConditionExpression( + table_alias="d0", column="tags_raw[333333]" + ), + ) + ], + join_type=JoinType.INNER, + join_modifier=None, + ) + + tag_condition = binary_condition( + "equals", tag_column("status_code", "d0"), Literal(None, "200") + ) + metric_condition1 = metric_id_condition(123456, "d0") + metric_condition2 = metric_id_condition(123456, "d1") + formula_condition = combine_and_conditions( + condition("d0") + + condition("d1") + + [tag_condition, metric_condition1, metric_condition2] + ) + + expected = CompositeQuery( + from_clause=join_clause, + selected_columns=[ + expected_selected, + SelectedExpression( + "transaction", + subscriptable_expression("333333", "d0"), + ), + SelectedExpression( + "transaction", + subscriptable_expression("333333", "d1"), + ), + ], + groupby=[ + subscriptable_expression("333333", "d0"), + subscriptable_expression("333333", "d1"), + ], + condition=formula_condition, + limit=1000, + offset=0, + totals=True, + ) + + generic_metrics = get_dataset( + "generic_metrics", + ) + query = parse_mql_query_new(str(query_body), mql_context_new, generic_metrics) + eq, reason = query.equals(expected) + assert eq, reason + + def test_mismatch_groupby() -> None: query_body = "sum(`d:transactions/duration@millisecond`){status_code:200} by transaction / sum(`d:transactions/duration@millisecond`) by status_code" generic_metrics = get_dataset( @@ -1311,7 +1399,7 @@ def test_formula_no_groupby_no_interval_with_totals() -> None: order_by=[], limit=1000, offset=0, - totals=True, + totals=False, ) generic_metrics = get_dataset( diff --git a/tests/query/parser/test_parser.py b/tests/query/parser/test_parser.py index a419efb1eb..d359f39376 100644 --- a/tests/query/parser/test_parser.py +++ b/tests/query/parser/test_parser.py @@ -40,7 +40,7 @@ def test_mql() -> None: "orderby": "ASC", "granularity": 60, "interval": None, - "with_totals": "True", + "with_totals": "False", }, "scope": { "org_ids": [1], @@ -130,7 +130,7 @@ def test_mql() -> None: ), ), ], - totals=True, + totals=False, limit=1000, ) actual = parse_mql_query_new(mql, context, get_dataset("generic_metrics")) @@ -147,7 +147,7 @@ def test_mql_wildcards() -> None: "orderby": "ASC", "granularity": 60, "interval": None, - "with_totals": "True", + "with_totals": 
"False", }, "scope": { "org_ids": [1], @@ -236,7 +236,7 @@ def test_mql_wildcards() -> None: ), ], limit=1000, - totals=True, + totals=False, ) actual = parse_mql_query_new(mql, context, get_dataset("generic_metrics")) eq, reason = actual.equals(expected) @@ -252,7 +252,7 @@ def test_mql_negated_wildcards() -> None: "orderby": "ASC", "granularity": 60, "interval": None, - "with_totals": "True", + "with_totals": "False", }, "scope": { "org_ids": [1], @@ -341,7 +341,7 @@ def test_mql_negated_wildcards() -> None: ), ], limit=1000, - totals=True, + totals=False, ) actual = parse_mql_query_new(mql, context, get_dataset("generic_metrics")) eq, reason = actual.equals(expected) diff --git a/tests/test_metrics_mql_api.py b/tests/test_metrics_mql_api.py index 09f1a329b9..724868e158 100644 --- a/tests/test_metrics_mql_api.py +++ b/tests/test_metrics_mql_api.py @@ -742,7 +742,7 @@ def test_formula_no_groupby_no_interval_with_totals(self) -> None: assert response.status_code == 200, response.data data = json.loads(response.data) - assert data["totals"]["aggregate_value"] == 4.0 + assert len(data["data"]) == 1, data def test_formula_no_groupby_with_interval_no_totals(self) -> None: query = MetricsQuery( @@ -1560,9 +1560,7 @@ def test_nested_formula_no_group_with_totals(self) -> None: ).serialize_mql(), ) data = json.loads(response.data) - assert ( - data["totals"]["aggregate_value"] > 180 - ) # Should be more than the number of data points + assert len(data["data"]) == 1, data assert response.status_code == 200 def test_simple_formula_filters_with_scalar(self) -> None: From 38c51d0a97c7bbfdb4ac232467b8836e83e86c83 Mon Sep 17 00:00:00 2001 From: Ian Woodard <17186604+IanWoodard@users.noreply.github.com> Date: Fri, 16 Aug 2024 12:08:14 -0700 Subject: [PATCH 04/12] fix(snuba-admin): Updating snuba admin link to open a new tab (#6219) This change updates the warning link on the "ClickHouse Tracing" page to open a new tab when clicked. This makes it easier to navigate back once the linked is clicked and the information is read. --- snuba/admin/static/tracing/query_display.tsx | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/snuba/admin/static/tracing/query_display.tsx b/snuba/admin/static/tracing/query_display.tsx index f026cab5ca..8542baf048 100644 --- a/snuba/admin/static/tracing/query_display.tsx +++ b/snuba/admin/static/tracing/query_display.tsx @@ -92,7 +92,10 @@ function QueryDisplay(props: { return (

Construct a ClickHouse Query

-
+
           🛑 WARNING! BEFORE RUNNING QUERIES, READ THIS 🛑

Date: Mon, 19 Aug 2024 10:43:53 -0700
Subject: [PATCH 05/12] Added a few missing details on Snuba development environment README page (#6220)

I ran into issues with the Python 3.8.11 version and Rust while setting up the local development environment using https://getsentry.github.io/snuba/contributing/environment.html. This PR fixes the documentation and makes it more consistent with what the team uses today.

Co-authored-by: Onkar Deshpande
---
 docs/source/contributing/environment.rst | 24 ++++++++++++++++++------
 1 file changed, 18 insertions(+), 6 deletions(-)

diff --git a/docs/source/contributing/environment.rst b/docs/source/contributing/environment.rst
index 077a0ab83b..0735425e5e 100644
--- a/docs/source/contributing/environment.rst
+++ b/docs/source/contributing/environment.rst
@@ -9,8 +9,20 @@ In order to set up Clickhouse, Redis, and Kafka, please refer to :doc:`/getstart
 Prerequisites
 -------------

-`pyenv `_ must be installed on your system.
-It is also assumed that you have completed the steps to set up the `sentry dev environment `_.
+It is assumed that you have completed the steps to set up the `sentry dev environment `_.
+Install `pyenv `_ on your system using Homebrew::
+
+    brew install pyenv
+
+You may have a Python version other than 3.11.8 installed on your machine, but Snuba needs Python 3.11.8::
+
+    pyenv install 3.11.8
+
+You will need an installation of Rust to develop Snuba. Go `here `_ to get one::
+
+    curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+
+Make sure to follow the installation steps and configure your shell for cargo (Rust's build system and package manager).

 If you are using Homebrew and a M1 Mac, ensure the development packages you've installed with Homebrew are available
 by setting these environment variables::

@@ -21,9 +33,9 @@
 Install / Run
 -------------

-clone this repo into your workspace::
-    git@github.com:getsentry/snuba.git
+Clone this repo into your workspace::
+
+    git clone git@github.com:getsentry/snuba.git

 These commands set up the Python virtual environment::

@@ -33,11 +45,11 @@
     pip install --upgrade pip==22.2.2
     make develop

-These commands start the Snuba api, which is capable of processing queries::
+This command starts the Snuba api, which is capable of processing queries::

     snuba api

-This command instead will start the api and all the Snuba consumers to ingest
+This command starts the api and Snuba consumers to ingest
 data from Kafka::

     snuba devserver

From af81c2a231d616d445911cd017c8d9481f5f1bdd Mon Sep 17 00:00:00 2001
From: Evan Hicks
Date: Mon, 19 Aug 2024 14:34:05 -0400
Subject: [PATCH 06/12] ref(admin): Show raw trace output in the Admin tool (#6221)

Add an option to show the raw trace output in the Tracing tool, as well as the ability to copy just the raw tracing output instead of the full JSON response.

![raw-output](https://github.com/user-attachments/assets/276a6700-a376-492d-9d19-88eee3c48955)
---
 snuba/admin/static/tracing/index.tsx         | 20 ++++++++++++++++++--
 snuba/admin/static/tracing/query_display.tsx | 10 ++++++++--
 2 files changed, 26 insertions(+), 4 deletions(-)

diff --git a/snuba/admin/static/tracing/index.tsx b/snuba/admin/static/tracing/index.tsx
index 0e154e18ac..21be576a00 100644
--- a/snuba/admin/static/tracing/index.tsx
+++ b/snuba/admin/static/tracing/index.tsx
@@ -198,7 +198,7 @@ function TracingQueries(props: { api: Client }) {
); } else if (title === "Trace") { - if (!showFormatted) { + if (showFormatted) { return (

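The JSX markup in these tracing hunks lost its angle-bracket tags in this copy of the patch. Judging from the surviving fragments, the rawTraceDisplay helper added a few hunks down plausibly renders the raw trace as an ordered list; in the sketch below the list tags and the key prop are assumptions, and only the split(/\n/) call and the per-line map come from the patch itself:

function rawTraceDisplay(title: string, value: any): JSX.Element | undefined {
  // split the raw ClickHouse trace output into individual log lines
  const parsedLines: Array<string> = value.split(/\n/);
  return (
    <ol>
      {parsedLines.map((line, index) => {
        // assumed markup: one numbered list item per trace line
        return <li key={index}>{line}</li>;
      })}
    </ol>
  );
}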
@@ -213,7 +213,7 @@ function TracingQueries(props: { api: Client }) {
Number of rows in result set: {value.num_rows_result}
- {formattedTraceDisplay(title, value.formatted_trace_output)} + {rawTraceDisplay(title, value.trace_output)}
); } @@ -223,6 +223,22 @@ function TracingQueries(props: { api: Client }) { ); } + function rawTraceDisplay(title: string, value: any): JSX.Element | undefined { + const parsedLines: Array = value.split(/\n/); + + return ( +
    + {parsedLines.map((line, index) => { + return ( +
  1. + {line} +
  2. + ); + })} +
+ ); + } + function heirarchicalRawTraceDisplay( title: string, value: any diff --git a/snuba/admin/static/tracing/query_display.tsx b/snuba/admin/static/tracing/query_display.tsx index 8542baf048..d7ce4e0fb7 100644 --- a/snuba/admin/static/tracing/query_display.tsx +++ b/snuba/admin/static/tracing/query_display.tsx @@ -27,7 +27,7 @@ function QueryDisplay(props: { [] ); const [isExecuting, setIsExecuting] = useState(false); - const [showFormatted, setShowFormatted] = useState(false); + const [showFormatted, setShowFormatted] = useState(true); useEffect(() => { props.api.getClickhouseNodes().then((res) => { @@ -148,11 +148,17 @@ function QueryDisplay(props: { rowData={queryResultHistory.map((queryResult) => [ {queryResult.input_query},
+ {props.resultDataPopulator(queryResult, showFormatted)}
, From 50bf188b784fc8d817e3e0463363f53d4e764c11 Mon Sep 17 00:00:00 2001 From: Evan Hicks Date: Tue, 20 Aug 2024 13:04:01 -0400 Subject: [PATCH 07/12] ref(admin): Standardize Execute Query buttons (#6222) This removes a lot of duplicate logic around handling errors and ensures that every page has a proper loading indicator. ![execute-button](https://github.com/user-attachments/assets/c267bdc0-f15d-498e-98f7-b3656e1fe0f9) --- .../cardinality_analyzer/query_display.tsx | 79 +++++++++++-------- .../clickhouse_queries/query_display.tsx | 19 ++--- .../admin/static/production_queries/index.tsx | 48 +++++------ snuba/admin/static/querylog/query_display.tsx | 26 ++---- .../admin/static/tests/tracing/index.spec.tsx | 2 +- .../tests/utils/execute_button.spec.tsx | 51 ++++++++++++ snuba/admin/static/tracing/query_display.tsx | 27 ++----- snuba/admin/static/utils/execute_button.tsx | 48 +++++++++++ 8 files changed, 184 insertions(+), 116 deletions(-) create mode 100644 snuba/admin/static/tests/utils/execute_button.spec.tsx create mode 100644 snuba/admin/static/utils/execute_button.tsx diff --git a/snuba/admin/static/cardinality_analyzer/query_display.tsx b/snuba/admin/static/cardinality_analyzer/query_display.tsx index b5be9c6ed0..861d8d3c25 100644 --- a/snuba/admin/static/cardinality_analyzer/query_display.tsx +++ b/snuba/admin/static/cardinality_analyzer/query_display.tsx @@ -3,8 +3,13 @@ import Client from "SnubaAdmin/api_client"; import { Collapse } from "SnubaAdmin/collapse"; import { CSV } from "SnubaAdmin/cardinality_analyzer/CSV"; import QueryEditor from "SnubaAdmin/query_editor"; +import ExecuteButton from "SnubaAdmin/utils/execute_button"; -import { CardinalityQueryRequest, CardinalityQueryResult, PredefinedQuery } from "./types"; +import { + CardinalityQueryRequest, + CardinalityQueryResult, + PredefinedQuery, +} from "SnubaAdmin/cardinality_analyzer/types"; enum ClipboardFormats { CSV = "csv", @@ -18,7 +23,9 @@ function QueryDisplay(props: { predefinedQueryOptions: Array; }) { const [query, setQuery] = useState({}); - const [queryResultHistory, setCardinalityQueryResultHistory] = useState([]); + const [queryResultHistory, setCardinalityQueryResultHistory] = useState< + CardinalityQueryResult[] + >([]); function updateQuerySql(sql: string) { setQuery((prevQuery) => { @@ -29,25 +36,16 @@ function QueryDisplay(props: { }); } - function executeQuery() { - props.api - .executeCardinalityQuery(query as CardinalityQueryRequest) - .then((result) => { - result.input_query = query.sql || ""; - setCardinalityQueryResultHistory((prevHistory) => [result, ...prevHistory]); - }) - .catch((err) => { - console.log("ERROR", err); - window.alert("An error occurred: " + err.error.message); - }); - } - function convertResultsToCSV(queryResult: CardinalityQueryResult) { return CSV.sheet([queryResult.column_names, ...queryResult.rows]); } - function copyText(queryResult: CardinalityQueryResult, format: ClipboardFormats) { - let formatter: (input: CardinalityQueryResult) => string = (s) => s.toString(); + function copyText( + queryResult: CardinalityQueryResult, + format: ClipboardFormats + ) { + let formatter: (input: CardinalityQueryResult) => string = (s) => + s.toString(); if (format === ClipboardFormats.JSON) { formatter = JSON.stringify; @@ -60,6 +58,18 @@ function QueryDisplay(props: { window.navigator.clipboard.writeText(formatter(queryResult)); } + function executeQuery() { + return props.api + .executeCardinalityQuery(query as CardinalityQueryRequest) + .then((result) => { + result.input_query = 
query.sql || ""; + setCardinalityQueryResultHistory((prevHistory) => [ + result, + ...prevHistory, + ]); + }); + } + return (

Construct a Metrics Query

@@ -70,18 +80,7 @@ function QueryDisplay(props: { predefinedQueryOptions={props.predefinedQueryOptions} />
-
- -
+

Query results

@@ -91,12 +90,18 @@ function QueryDisplay(props: {

{queryResult.input_query}

-

-

@@ -107,10 +112,16 @@ function QueryDisplay(props: { return ( - - {props.resultDataPopulator(queryResult)} @@ -128,7 +139,7 @@ const executeActionsStyle = { marginTop: 8, }; -const executeButtonStyle = { +const copyButtonStyle = { height: 30, border: 0, padding: "4px 20px", diff --git a/snuba/admin/static/clickhouse_queries/query_display.tsx b/snuba/admin/static/clickhouse_queries/query_display.tsx index df1802836c..e3136a55c5 100644 --- a/snuba/admin/static/clickhouse_queries/query_display.tsx +++ b/snuba/admin/static/clickhouse_queries/query_display.tsx @@ -2,6 +2,7 @@ import React, { useEffect, useState } from "react"; import Client from "SnubaAdmin/api_client"; import { Collapse } from "SnubaAdmin/collapse"; import QueryEditor from "SnubaAdmin/query_editor"; +import ExecuteButton from "SnubaAdmin/utils/execute_button"; import { Prism } from "@mantine/prism"; import { RichTextEditor } from "@mantine/tiptap"; @@ -71,15 +72,11 @@ function QueryDisplay(props: { } function executeQuery() { - props.api + return props.api .executeSystemQuery(query as QueryRequest) .then((result) => { result.input_query = `${query.sql} (${query.storage},${query.host}:${query.port})`; setQueryResultHistory((prevHistory) => [result, ...prevHistory]); - }) - .catch((err) => { - console.log("ERROR", err); - window.alert("An error occurred: " + err.error); }); } @@ -167,18 +164,12 @@ function QueryDisplay(props: {
- + />
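The recurring change in this patch is worth spelling out: each page's executeQuery stops catching errors and managing its own isExecuting flag, and instead returns the Promise so the shared button can do both. A minimal sketch of the resulting call-site shape, using the clickhouse_queries page's names (illustrative, not lifted verbatim from the diff):

function executeQuery(): Promise<void> {
  // Return the Promise: ExecuteButton drives the loading state and the
  // error alert, so no catch/finally is needed at the call site anymore.
  return props.api
    .executeSystemQuery(query as QueryRequest)
    .then((result) => {
      setQueryResultHistory((prevHistory) => [result, ...prevHistory]);
    });
}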
diff --git a/snuba/admin/static/production_queries/index.tsx b/snuba/admin/static/production_queries/index.tsx index cedcb27eab..5b7452b9c0 100644 --- a/snuba/admin/static/production_queries/index.tsx +++ b/snuba/admin/static/production_queries/index.tsx @@ -1,7 +1,12 @@ import React, { useEffect, useState } from "react"; import Client from "SnubaAdmin/api_client"; import { Table } from "SnubaAdmin/table"; -import { QueryResult, QueryResultColumnMeta, SnQLRequest } from "SnubaAdmin/production_queries/types"; +import { + QueryResult, + QueryResultColumnMeta, + SnQLRequest, +} from "SnubaAdmin/production_queries/types"; +import ExecuteButton from "SnubaAdmin/utils/execute_button"; import { executeActionsStyle } from "SnubaAdmin/production_queries/styles"; import { Accordion, @@ -58,11 +63,7 @@ function ProductionQueries(props: { api: Client }) { } function executeQuery() { - if (isExecuting) { - window.alert("A query is already running"); - } - setIsExecuting(true); - props.api + return props.api .executeSnQLQuery(snql_query as SnQLRequest) .then((result) => { const result_columns = result.meta.map( @@ -80,13 +81,6 @@ function ProductionQueries(props: { api: Client }) { quota_allowance: result.quota_allowance, }; setQueryResultHistory((prevHistory) => [query_result, ...prevHistory]); - }) - .catch((err) => { - console.log("ERROR", err); - window.alert("An error occurred: " + err.message); - }) - .finally(() => { - setIsExecuting(false); }); } @@ -116,15 +110,12 @@ function ProductionQueries(props: { api: Client }) { />
- + />
@@ -225,9 +216,9 @@ function renderThrottleStatus(isThrottled: boolean, reasonHeader: string[]) { > Quota Allowance - Throttled
    - {reasonHeader.map((line, index) => ( + {reasonHeader.map((line, index) => (
  1. {line}
  2. - ))} + ))}
) : ( @@ -261,8 +252,9 @@ function QueryResultQuotaAllowance(props: { queryResult: QueryResult }) { const isThrottled: boolean = (props.queryResult.quota_allowance && Object.values(props.queryResult.quota_allowance).some( - (policy) => policy.max_threads < 10, - )) || false; + (policy) => policy.max_threads < 10 + )) || + false; let reasonHeader: string[] = []; if (isThrottled) { props.queryResult.quota_allowance && @@ -270,8 +262,12 @@ function QueryResultQuotaAllowance(props: { queryResult: QueryResult }) { const policy = props.queryResult.quota_allowance![policyName]; if (policy.max_threads < 10 && policy.explanation.reason != null) { reasonHeader.push( - policyName + ": " + policy.explanation.reason + - ". SnQL Query executed with " + policy.max_threads + " threads.", + policyName + + ": " + + policy.explanation.reason + + ". SnQL Query executed with " + + policy.max_threads + + " threads." ); } }); @@ -282,9 +278,7 @@ function QueryResultQuotaAllowance(props: { queryResult: QueryResult }) { {renderThrottleStatus(isThrottled, reasonHeader)} - - {renderPolicyDetails(props)} - + {renderPolicyDetails(props)} ); diff --git a/snuba/admin/static/querylog/query_display.tsx b/snuba/admin/static/querylog/query_display.tsx index 12b304465b..f1a45dd195 100644 --- a/snuba/admin/static/querylog/query_display.tsx +++ b/snuba/admin/static/querylog/query_display.tsx @@ -1,9 +1,11 @@ import React, { useState } from "react"; +import { Button } from "@mantine/core"; import Client from "SnubaAdmin/api_client"; import { Collapse } from "SnubaAdmin/collapse"; import QueryEditor from "SnubaAdmin/query_editor"; import { QuerylogRequest, QuerylogResult, PredefinedQuery } from "./types"; +import ExecuteButton from "SnubaAdmin/utils/execute_button"; type QueryState = Partial; @@ -27,15 +29,11 @@ function QueryDisplay(props: { } function executeQuery() { - props.api + return props.api .executeQuerylogQuery(query as QuerylogRequest) .then((result) => { result.input_query = query.sql || ""; setQueryResultHistory((prevHistory) => [result, ...prevHistory]); - }) - .catch((err) => { - console.log("ERROR", err); - window.alert("An error occurred: " + err.error.message); }); } @@ -79,25 +77,15 @@ function QueryDisplay(props: { />
- - +
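For reference, a hedged usage sketch of the shared component (the props match the execute_button.tsx definition added below; the disabled condition and the onError override here are illustrative):

<ExecuteButton
  disabled={!query.sql}
  onClick={executeQuery}
  // optional: without onError, the component falls back to window.alert
  onError={(err) => console.error(err)}
/>

While the returned Promise is pending, the component reports "A query is already running" on further clicks; the new spec file below pins down exactly that double-click behavior.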
diff --git a/snuba/admin/static/tests/tracing/index.spec.tsx b/snuba/admin/static/tests/tracing/index.spec.tsx index fae7e8a3d2..d876b546cb 100644 --- a/snuba/admin/static/tests/tracing/index.spec.tsx +++ b/snuba/admin/static/tests/tracing/index.spec.tsx @@ -46,7 +46,7 @@ it("select executor rows should appear", async () => { target: { value: "Foo" }, }); - const submitButton = screen.getByText("Execute query"); + const submitButton = screen.getByText("Execute Query"); expect(submitButton.getAttribute("disabled")).toBeFalsy(); fireEvent.click(submitButton); diff --git a/snuba/admin/static/tests/utils/execute_button.spec.tsx b/snuba/admin/static/tests/utils/execute_button.spec.tsx new file mode 100644 index 0000000000..c1ec9094ce --- /dev/null +++ b/snuba/admin/static/tests/utils/execute_button.spec.tsx @@ -0,0 +1,51 @@ +import Nav from "SnubaAdmin/nav"; +import Client from "SnubaAdmin/api_client"; +import React from "react"; +import { it, expect, jest, afterEach } from "@jest/globals"; +import { + render, + act, + waitFor, + screen, + fireEvent, +} from "@testing-library/react"; +import { AllowedTools } from "SnubaAdmin/types"; +import ExecuteButton from "SnubaAdmin/utils/execute_button"; + +it("should call onClick", async () => { + let mockCall = jest.fn<() => Promise>().mockResolvedValueOnce({}); + + render(); + + const button = screen.getByRole("button"); + fireEvent.click(button); + + await waitFor(() => expect(mockCall).toBeCalledTimes(1)); +}); + +it("should not call if disabled", async () => { + let mockCall = jest.fn( + () => new Promise((resolve) => setTimeout(resolve, 1000)) + ); + + render(); + + const button = screen.getByRole("button"); + fireEvent.click(button); + + await waitFor(() => expect(mockCall).toBeCalledTimes(0)); +}); + +it("should not call if loading", async () => { + let mockCall = jest.fn( + () => new Promise((resolve) => setTimeout(resolve, 1000)) + ); + + render(); + + const button = screen.getByRole("button"); + fireEvent.click(button); + fireEvent.click(button); + + await waitFor(() => expect(mockCall).toBeCalledTimes(1)); +}); diff --git a/snuba/admin/static/tracing/query_display.tsx b/snuba/admin/static/tracing/query_display.tsx index d7ce4e0fb7..e07c6711f8 100644 --- a/snuba/admin/static/tracing/query_display.tsx +++ b/snuba/admin/static/tracing/query_display.tsx @@ -3,6 +3,7 @@ import { Switch } from "@mantine/core"; import Client from "SnubaAdmin/api_client"; import QueryEditor from "SnubaAdmin/query_editor"; import { Table } from "SnubaAdmin/table"; +import ExecuteButton from "SnubaAdmin/utils/execute_button"; import { LogLine, @@ -45,11 +46,7 @@ function QueryDisplay(props: { } function executeQuery() { - if (isExecuting) { - window.alert("A query is already running"); - } - setIsExecuting(true); - props.api + return props.api .executeTracingQuery(query as TracingRequest) .then((result) => { const tracing_result = { @@ -65,13 +62,6 @@ function QueryDisplay(props: { tracing_result, ...prevHistory, ]); - }) - .catch((err) => { - console.log("ERROR", err); - window.alert("An error occurred: " + err.error.message); - }) - .finally(() => { - setIsExecuting(false); }); } @@ -121,15 +111,10 @@ function QueryDisplay(props: { ))}
-
- -
+

Query results

diff --git a/snuba/admin/static/utils/execute_button.tsx b/snuba/admin/static/utils/execute_button.tsx new file mode 100644 index 0000000000..a301e84ac7 --- /dev/null +++ b/snuba/admin/static/utils/execute_button.tsx @@ -0,0 +1,48 @@ +import React, { useState } from "react"; +import { Button } from "@mantine/core"; + +function ExecuteButton(props: { + disabled: boolean; + onClick: () => Promise; + onError?: (error: any) => any; +}) { + const [isExecuting, setIsExecuting] = useState(false); + + const defaultError = (err: any) => { + console.log("ERROR", err); + window.alert("An error occurred: " + err.error.message); + }; + let errorCallback = props.onError || defaultError; + + function executeQuery() { + if (isExecuting) { + window.alert("A query is already running"); + } + setIsExecuting(true); + props + .onClick() + .catch((err: any) => { + errorCallback(err); + }) + .finally(() => { + setIsExecuting(false); + }); + } + + return ( +
+ +
+  );
+}
+
+export default ExecuteButton;

From 275fc6f70e0c223ce27d24afd2777444834527ad Mon Sep 17 00:00:00 2001
From: volokluev <3169433+volokluev@users.noreply.github.com>
Date: Tue, 20 Aug 2024 13:55:26 -0700
Subject: [PATCH 08/12] feat(dsl): Add curried functions to dsl (#6225)

We have a convenient way to express functions in the DSL; this expands it to
curried functions (which are a pain in the ass) and adds a test.

---
 snuba/query/dsl.py      | 63 +++++++++++++++++++++++++++++++++++++----
 tests/query/test_dsl.py | 18 ++++++++++++
 2 files changed, 75 insertions(+), 6 deletions(-)
 create mode 100644 tests/query/test_dsl.py

diff --git a/snuba/query/dsl.py b/snuba/query/dsl.py
index cbe9bec523..e1dd4e43ef 100644
--- a/snuba/query/dsl.py
+++ b/snuba/query/dsl.py
@@ -4,6 +4,7 @@
 from snuba.query.expressions import (
     Column,
+    CurriedFunctionCall,
     Expression,
     FunctionCall,
     Literal,
@@ -37,39 +38,89 @@ def __getitem__(self, key: str) -> SubscriptableReference:
         )


+def _arg_to_literal_expr(arg: Expression | OptionalScalarType) -> Expression:
+    if isinstance(arg, Expression):
+        return arg
+    return Literal(None, arg)
+
+
 class _FunctionCall:
     def __init__(self, name: str) -> None:
         self.name = name

-    def _arg_to_literal_expr(self, arg: Expression | OptionalScalarType) -> Expression:
-        if isinstance(arg, Expression):
-            return arg
-        return Literal(None, arg)
-
     def __call__(
         self, *args: Expression | OptionalScalarType, **kwargs: str
     ) -> FunctionCall:
         alias = kwargs.pop("alias", None)
         if kwargs:
             raise ValueError(f"Unsupported dsl kwargs: {kwargs}")
-        transformed_args = [self._arg_to_literal_expr(arg) for arg in args]
+        transformed_args = [_arg_to_literal_expr(arg) for arg in args]
         return FunctionCall(alias, self.name, tuple(transformed_args))


+class _CurriedFunctionCall:
+    def __init__(self, internal_function: FunctionCall):
+        self.internal_function = internal_function
+
+    def __call__(
+        self, *args: Expression | OptionalScalarType, **kwargs: str
+    ) -> CurriedFunctionCall:
+        alias = kwargs.pop("alias", None)
+        if kwargs:
+            raise ValueError(f"Unsupported dsl kwargs: {kwargs}")
+
+        transformed_args = [_arg_to_literal_expr(arg) for arg in args]
+
+        return CurriedFunctionCall(
+            alias=alias,
+            internal_function=self.internal_function,
+            parameters=tuple(transformed_args),
+        )
+
+
 class _Functions:
     def __getattr__(self, name: str) -> _FunctionCall:
         return _FunctionCall(name)


+class _InternalCurriedFunction:
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+    def __call__(
+        self, *args: Expression | OptionalScalarType, **kwargs: str
+    ) -> _CurriedFunctionCall:
+        alias = kwargs.pop("alias", None)
+        if kwargs:
+            raise ValueError(f"Unsupported dsl kwargs: {kwargs}")
+        transformed_args = [_arg_to_literal_expr(arg) for arg in args]
+        internal_function = FunctionCall(alias, self.name, tuple(transformed_args))
+        return _CurriedFunctionCall(internal_function=internal_function)
+
+
+class _CurriedFunctions:
+    def __getattr__(self, name: str) -> _InternalCurriedFunction:
+        return _InternalCurriedFunction(name)
+
+
 """
 Usage:

+from snuba.query.dsl import CurriedFunctions as cf
 from snuba.query.dsl import Functions as f
+
 assert f.equals(1, 1, alias="eq") == FunctionCall(
     "eq", "equals", (Literal(None, 1), Literal(None, 1))
 )
+
+assert cf.quantile(0.9)(column("measurement"), alias="p90") == CurriedFunctionCall(
+    alias="p90",
+    internal_function=f.quantile(0.9),
+    parameters=(column("measurement"),)
+)
 """
 Functions = _Functions()
+CurriedFunctions = _CurriedFunctions()


 def column(
diff --git a/tests/query/test_dsl.py
b/tests/query/test_dsl.py new file mode 100644 index 0000000000..956a5dbe3e --- /dev/null +++ b/tests/query/test_dsl.py @@ -0,0 +1,18 @@ +from snuba.query.dsl import CurriedFunctions as cf +from snuba.query.dsl import Functions as f +from snuba.query.dsl import column +from snuba.query.expressions import CurriedFunctionCall, FunctionCall, Literal + + +def test_function_syntax() -> None: + assert f.equals(1, 1, alias="eq") == FunctionCall( + "eq", "equals", parameters=(Literal(None, 1), Literal(None, 1)) + ) + + +def test_curried_function() -> None: + assert cf.quantile(0.9)(column("measurement"), alias="p90") == CurriedFunctionCall( + alias="p90", + internal_function=f.quantile(0.9), + parameters=(column("measurement"),), + ) From ad47e0b18f9699db7f9233912a638c827322fe66 Mon Sep 17 00:00:00 2001 From: Evan Hicks Date: Wed, 21 Aug 2024 11:40:23 -0400 Subject: [PATCH 09/12] fix(admin) Cleanup unused functions after previous changes (#6227) The tracing tool was simplified, and a common execute button was added, and now a lot of this functionality is obsolete. The main logic of the tracing tool was moved into the query display module, so there was a lot of obsolete code in that file. --- .../admin/static/production_queries/index.tsx | 3 +- snuba/admin/static/snql_to_sql/index.tsx | 35 ++-- snuba/admin/static/snuba_explain/index.tsx | 40 ++-- snuba/admin/static/tracing/index.tsx | 173 +----------------- snuba/admin/static/tracing/query_display.tsx | 1 - snuba/admin/static/utils/execute_button.tsx | 5 +- 6 files changed, 32 insertions(+), 225 deletions(-) diff --git a/snuba/admin/static/production_queries/index.tsx b/snuba/admin/static/production_queries/index.tsx index 5b7452b9c0..199cdd022c 100644 --- a/snuba/admin/static/production_queries/index.tsx +++ b/snuba/admin/static/production_queries/index.tsx @@ -21,7 +21,7 @@ import { Textarea, } from "@mantine/core"; import { useDisclosure } from "@mantine/hooks"; -import { CSV } from "../cardinality_analyzer/CSV"; +import { CSV } from "SnubaAdmin/cardinality_analyzer/CSV"; function ProductionQueries(props: { api: Client }) { const [datasets, setDatasets] = useState([]); @@ -30,7 +30,6 @@ function ProductionQueries(props: { api: Client }) { const [queryResultHistory, setQueryResultHistory] = useState( [] ); - const [isExecuting, setIsExecuting] = useState(false); useEffect(() => { props.api.getSnubaDatasetNames().then((res) => { diff --git a/snuba/admin/static/snql_to_sql/index.tsx b/snuba/admin/static/snql_to_sql/index.tsx index a1aea12250..2cc5b06833 100644 --- a/snuba/admin/static/snql_to_sql/index.tsx +++ b/snuba/admin/static/snql_to_sql/index.tsx @@ -2,8 +2,13 @@ import React, { useEffect, useState } from "react"; import Client from "SnubaAdmin/api_client"; import { Table } from "SnubaAdmin/table"; -import { executeActionsStyle, selectStyle, executeButtonStyle } from "SnubaAdmin/snql_to_sql/styles"; +import { + executeActionsStyle, + selectStyle, + executeButtonStyle, +} from "SnubaAdmin/snql_to_sql/styles"; import { TextArea } from "SnubaAdmin/snql_to_sql/utils"; +import ExecuteButton from "SnubaAdmin/utils/execute_button"; import { SnQLRequest, SnQLResult, @@ -17,7 +22,6 @@ function SnQLToSQL(props: { api: Client }) { const [queryResultHistory, setQueryResultHistory] = useState( [] ); - const [isExecuting, setIsExecuting] = useState(false); useEffect(() => { props.api.getSnubaDatasetNames().then((res) => { @@ -44,11 +48,7 @@ function SnQLToSQL(props: { api: Client }) { } function convertQuery() { - if (isExecuting) { - window.alert("A query 
is already running"); - } - setIsExecuting(true); - props.api + return props.api .debugSnQLQuery(snql_query as SnQLRequest) .then((result) => { const query_result = { @@ -56,13 +56,6 @@ function SnQLToSQL(props: { api: Client }) { sql: result.sql, }; setQueryResultHistory((prevHistory) => [query_result, ...prevHistory]); - }) - .catch((err) => { - console.log("ERROR", err); - window.alert("An error occurred: " + err.message); - }) - .finally(() => { - setIsExecuting(false); }); } @@ -91,17 +84,13 @@ function SnQLToSQL(props: { api: Client }) {
- + label="Convert Query" + />
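The stat line for this patch shows snuba/admin/static/utils/execute_button.tsx changing by 5 lines, and the call sites here begin passing label="Convert Query" and label="Explain Query". A plausible reading, assumed rather than recovered from the mangled diff, is that the component's props gained an optional label:

// Assumed prop shape after this patch; label is a guess based on the call
// sites, the other props are visible in patch 07's execute_button.tsx.
type ExecuteButtonProps = {
  disabled: boolean;
  onClick: () => Promise<any>;
  onError?: (error: any) => any;
  label?: string; // presumably defaults to "Execute Query"
};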
diff --git a/snuba/admin/static/snuba_explain/index.tsx b/snuba/admin/static/snuba_explain/index.tsx index cc4d254cb7..e9d8e3e0cd 100644 --- a/snuba/admin/static/snuba_explain/index.tsx +++ b/snuba/admin/static/snuba_explain/index.tsx @@ -4,13 +4,18 @@ import { Prism } from "@mantine/prism"; import Client from "SnubaAdmin/api_client"; import QueryEditor from "SnubaAdmin/query_editor"; import { Collapse } from "SnubaAdmin/collapse"; -import { SnQLRequest, SnQLResult, ExplainResult, ExplainStep } from "SnubaAdmin/snuba_explain/types"; +import { + SnQLRequest, + SnQLResult, + ExplainResult, + ExplainStep, +} from "SnubaAdmin/snuba_explain/types"; import { Step } from "SnubaAdmin/snuba_explain/step_render"; +import ExecuteButton from "SnubaAdmin/utils/execute_button"; import { executeActionsStyle, selectStyle, - executeButtonStyle, collapsibleStyle, } from "SnubaAdmin/snuba_explain/styles"; import { SnubaDatasetName, SnQLQueryState } from "SnubaAdmin/snql_to_sql/types"; @@ -21,7 +26,6 @@ function SnubaExplain(props: { api: Client }) { const [queryResultHistory, setQueryResultHistory] = useState( [] ); - const [isExecuting, setIsExecuting] = useState(false); useEffect(() => { props.api.getSnubaDatasetNames().then((res) => { @@ -48,11 +52,7 @@ function SnubaExplain(props: { api: Client }) { } function explainQuery() { - if (isExecuting) { - window.alert("A query is already running"); - } - setIsExecuting(true); - props.api + return props.api .debugSnQLQuery(snql_query as SnQLRequest) .then((result) => { const query_result = { @@ -61,13 +61,6 @@ function SnubaExplain(props: { api: Client }) { explain: result.explain as ExplainResult, }; setQueryResultHistory((prevHistory) => [query_result, ...prevHistory]); - }) - .catch((err) => { - console.log("ERROR", err); - window.alert("An error occurred: " + err.message); - }) - .finally(() => { - setIsExecuting(false); }); } @@ -120,20 +113,13 @@ function SnubaExplain(props: { api: Client }) {
- + label="Explain Query" + />
diff --git a/snuba/admin/static/tracing/index.tsx b/snuba/admin/static/tracing/index.tsx index 21be576a00..7ef83eee66 100644 --- a/snuba/admin/static/tracing/index.tsx +++ b/snuba/admin/static/tracing/index.tsx @@ -1,16 +1,9 @@ -import React, { useEffect, useState } from "react"; +import React, { useState } from "react"; import Client from "SnubaAdmin/api_client"; import QueryDisplay from "SnubaAdmin/tracing/query_display"; -import { - LogLine, - TracingRequest, - TracingResult, - PredefinedQuery, -} from "SnubaAdmin/tracing/types"; +import { LogLine, TracingResult } from "SnubaAdmin/tracing/types"; import { parseLogLine } from "SnubaAdmin/tracing/util"; -type QueryState = Partial; - type BucketedLogs = Map>; enum MessageCategory { @@ -82,43 +75,7 @@ function NodalDisplay(props: { ); } -function FormattedNodalDisplay(props: { - header: string; - data: string[] | string | number; -}) { - const [visible, setVisible] = useState(false); - - return ( -
  • - setVisible(!visible)}> - {visible ? "[-]" : "[+]"} {props.header.split("_").join(" ")} - - -
      - {visible && - Array.isArray(props.data) && - props.data.map((log: string, log_idx: number) => { - return
    1. {log}
    2. ; - })} - {visible && - (typeof props.data === "string" || - typeof props.data === "number") &&
    3. {props.data}
    4. } -
    -
  • - ); -} - function TracingQueries(props: { api: Client }) { - const [query, setQuery] = useState({}); - const [queryResultHistory, setQueryResultHistory] = useState( - [] - ); - const [isExecuting, setIsExecuting] = useState(false); - const [predefinedQueryOptions, setPredefinedQueryOptions] = useState< - PredefinedQuery[] - >([]); - - const endpoint = "clickhouse_trace_query"; const hidden_formatted_trace_fields = new Set([ "thread_ids", "node_name", @@ -126,49 +83,6 @@ function TracingQueries(props: { api: Client }) { "storage_nodes_accessed", ]); - function formatSQL(sql: string) { - const formatted = sql - .split("\n") - .map((line) => line.substring(4, line.length)) - .join("\n"); - return formatted.trim(); - } - - function executeQuery() { - if (isExecuting) { - window.alert("A query is already running"); - } - setIsExecuting(true); - props.api - .executeTracingQuery(query as TracingRequest) - .then((result) => { - const tracing_result = { - input_query: `${query.sql}`, - timestamp: result.timestamp, - num_rows_result: result.num_rows_result, - cols: result.cols, - trace_output: result.trace_output, - formatted_trace_output: result.formatted_trace_output, - error: result.error, - }; - setQueryResultHistory((prevHistory) => [ - tracing_result, - ...prevHistory, - ]); - }) - .catch((err) => { - console.log("ERROR", err); - window.alert("An error occurred: " + err.error.message); - }) - .finally(() => { - setIsExecuting(false); - }); - } - - function copyText(text: string) { - window.navigator.clipboard.writeText(text); - } - function tablePopulator(queryResult: TracingResult, showFormatted: boolean) { var elements = {}; if (queryResult.error) { @@ -357,56 +271,6 @@ function TracingQueries(props: { api: Client }) { ); } - function formattedTraceDisplay( - title: string, - value: any - ): JSX.Element | undefined { - let node_names = Object.keys(value); - let query_node_name = ""; - for (const node_name of node_names) { - if (value[node_name]["node_type"] == "query") { - query_node_name = node_name; - } - } - return ( -
      -
    1. Query node - {query_node_name}
    2. -
        - {Object.keys(value[query_node_name]).map( - (header: string, idx: number) => { - if (!hidden_formatted_trace_fields.has(header)) { - const data = value[query_node_name][header]; - return ; - } - } - )} -
      - {node_names.map((node_name, idx) => { - if (node_name != query_node_name) { - return ( -
        -
        -
      1. Storage node - {node_name}
      2. -
          - {Object.keys(value[node_name]).map( - (header: string, idx: number) => { - if (!hidden_formatted_trace_fields.has(header)) { - const data = value[node_name][header]; - return ( - - ); - } - } - )} -
        -
      - ); - } - })} -
    - ); - } - return (
    {QueryDisplay({ @@ -418,37 +282,4 @@ function TracingQueries(props: { api: Client }) { ); } -const executeActionsStyle = { - display: "flex", - justifyContent: "space-between", - marginTop: 8, -}; - -const executeButtonStyle = { - height: 30, - border: 0, - padding: "4px 20px", -}; - -const selectStyle = { - marginRight: 8, - height: 30, -}; - -function TextArea(props: { - value: string; - onChange: (nextValue: string) => void; -}) { - const { value, onChange } = props; - return ( -