diff --git a/Makefile b/Makefile
index 396c4cffbe..6f29559f45 100644
--- a/Makefile
+++ b/Makefile
@@ -64,14 +64,6 @@ install-brew-dev:
 	brew bundle
 .PHONY: install-brew-dev
 
-# this can go away once sentry formalizes a way of working with protobuf / grpc
-protos:
-	@which protoc || (echo "!!! You need protoc installed in order to build protos. https://grpc.io/docs/protoc-installation/" && exit 1)
-	@type protoc-gen-mypy || (echo "!!! Failed, run this: pip install mypy-protobuf==3.6.0" && exit 1)
-	protoc --python_out=. --mypy_out=. $$(find snuba/protobufs -name '*.proto')
-
-.PHONY: protos
-
 snubadocs:
 	pip install -U -r ./docs-requirements.txt
 	sphinx-build -W -b html docs/source docs/build
diff --git a/docs/source/contributing/environment.rst b/docs/source/contributing/environment.rst
index 077a0ab83b..0735425e5e 100644
--- a/docs/source/contributing/environment.rst
+++ b/docs/source/contributing/environment.rst
@@ -9,8 +9,20 @@ In order to set up Clickhouse, Redis, and Kafka, please refer to :doc:`/getstart
 Prerequisites
 -------------
 
-`pyenv <https://github.com/pyenv/pyenv>`_ must be installed on your system.
-It is also assumed that you have completed the steps to set up the `sentry dev environment <https://develop.sentry.dev/environment/>`_.
+It is assumed that you have completed the steps to set up the `sentry dev environment <https://develop.sentry.dev/environment/>`_.
+Install `pyenv <https://github.com/pyenv/pyenv>`_ on your system using Homebrew::
+
+    brew install pyenv
+
+You may have Python versions other than 3.11.8 installed on your machine, but Snuba requires Python 3.11.8::
+
+    pyenv install 3.11.8
+
+You will need an installation of Rust to develop Snuba. Go `here <https://rustup.rs/>`_ to get one::
+
+    curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+
+Make sure to follow the installation steps and configure your shell for cargo (Rust's build system and package manager).
 
 If you are using Homebrew and an M1 Mac, ensure the development packages you've installed with Homebrew are available
 by setting these environment variables::
@@ -21,9 +33,9 @@ by setting these environment variables::
 
 Install / Run
 -------------
 
-clone this repo into your workspace::
+Clone this repo into your workspace::
 
-    git@github.com:getsentry/snuba.git
+    git clone git@github.com:getsentry/snuba.git
 
 These commands set up the Python virtual environment::
@@ -33,11 +45,11 @@ These commands set up the Python virtual environment::
 
     pip install --upgrade pip==22.2.2
     make develop
 
-These commands start the Snuba api, which is capable of processing queries::
+This command starts the Snuba API, which is capable of processing queries::
 
     snuba api
 
-This command instead will start the api and all the Snuba consumers to ingest
+This command starts the API and the Snuba consumers to ingest
 data from Kafka::
 
     snuba devserver
diff --git a/requirements.txt b/requirements.txt
index 5248594b1d..83172dc16c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -45,3 +45,4 @@ sqlparse==0.4.2
 google-api-python-client==2.88.0
 sentry-usage-accountant==0.0.10
 freezegun==1.2.2
+sentry-protos==0.1.3
diff --git a/snuba/admin/clickhouse/trace_log_parsing.py b/snuba/admin/clickhouse/trace_log_parsing.py
new file mode 100644
index 0000000000..faf116a958
--- /dev/null
+++ b/snuba/admin/clickhouse/trace_log_parsing.py
@@ -0,0 +1,275 @@
+from __future__ import annotations
+
+import re
+from dataclasses import dataclass
+from typing import Any
+
+# [ spans-clickhouse-1 ] [ 65011 ] {0.21246445055947638} default.spans_optimized_v2_traces (aacb1a4f-32d0-49ea-8985-9c0d92a079ae) (SelectExecutor): Index `bf_attr_str_5` has dropped 0/2199 granules.
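+# A minimal usage sketch of the parsers below (hypothetical log values, and
+# assuming the `[ node ] [ thread ] {query_id}` prefix has already been
+# stripped by format_log_to_dict):
+#
+#   IndexSummary.from_log(
+#       "default.spans_local (some-uuid) (SelectExecutor): Index `bf_1` has dropped 5/100 granules."
+#   )
+#   # -> IndexSummary(table_name="default.spans_local", index_name="bf_1",
+#   #                 dropped_granules=5, total_granules=100)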
+INDEX_MATCHER_RE = re.compile(
+    r"(?P<table_name>.*) \(.*\) \(SelectExecutor\): Index `(?P<index_name>.*)` has dropped (?P<dropped_granules>\d+)/(?P<total_granules>\d+) granules."
+)
+
+
+@dataclass
+class IndexSummary:
+    table_name: str
+    index_name: str
+    dropped_granules: int
+    total_granules: int
+
+    @staticmethod
+    def from_log(log_line: str) -> IndexSummary | None:
+        match = INDEX_MATCHER_RE.match(log_line)
+        if not match:
+            return None
+
+        return IndexSummary(
+            table_name=match.group("table_name"),
+            index_name=match.group("index_name"),
+            dropped_granules=int(match.group("dropped_granules")),
+            total_granules=int(match.group("total_granules")),
+        )
+
+
+# [ spans-clickhouse-1 ] [ 65011 ] {0.21246445055947638} default.spans_optimized_v2_traces (aacb1a4f-32d0-49ea-8985-9c0d92a079ae) (SelectExecutor): Selected 4/4 parts by partition key, 4 parts by primary key, 2199/2199 marks by primary key, 2031 marks to read from 22 ranges
+SELECT_MATCHER_RE = re.compile(
+    r"(?P<table_name>.*) \(.*\) \(SelectExecutor\): Selected (?P<parts>\d+)/(?P<total_parts>\d+) parts by partition key, (?P<primary_parts>\d+) parts by primary key, (?P<selected_marks>\d+)/(?P<total_marks>\d+) marks by primary key, (?P<marks_to_read>\d+) marks to read from \d+ ranges"
+)
+
+
+@dataclass
+class SelectSummary:
+    table_name: str
+    parts_selected_by_partition_key: int
+    total_parts: int
+    parts_selected_by_primary_key: int
+    marks_selected_by_primary_key: int
+    total_marks: int
+    marks_to_read_from_ranges: int
+
+    @staticmethod
+    def from_log(log_line: str) -> SelectSummary | None:
+        match = SELECT_MATCHER_RE.match(log_line)
+        if not match:
+            return None
+
+        return SelectSummary(
+            table_name=match.group("table_name"),
+            parts_selected_by_partition_key=int(match.group("parts")),
+            total_parts=int(match.group("total_parts")),
+            parts_selected_by_primary_key=int(match.group("primary_parts")),
+            marks_selected_by_primary_key=int(match.group("selected_marks")),
+            total_marks=int(match.group("total_marks")),
+            marks_to_read_from_ranges=int(match.group("marks_to_read")),
+        )
+
+
+# [ spans-clickhouse-1 ] [ 65011 ] {0.21246445055947638} default.spans_optimized_v2_traces (aacb1a4f-32d0-49ea-8985-9c0d92a079ae) (SelectExecutor): Reading approx. 8286299 rows with 4 streams
+STREAM_MATCHER_RE = re.compile(
+    r"(?P<table_name>.*) \(.*\) \(SelectExecutor\): Reading approx. (?P<approximate_rows>\d+) rows with (?P<streams>\d+) streams"
+)
+
+
+@dataclass
+class StreamSummary:
+    table_name: str
+    approximate_rows: int
+    streams: int
+
+    @staticmethod
+    def from_log(log_line: str) -> StreamSummary | None:
+        match = STREAM_MATCHER_RE.match(log_line)
+        if not match:
+            return None
+
+        return StreamSummary(
+            table_name=match.group("table_name"),
+            approximate_rows=int(match.group("approximate_rows")),
+            streams=int(match.group("streams")),
+        )
+
+
+# [ snuba-st-1-2.c.mattrobenolt-kube.internal ] [ 848231 ] {f3fb112a-583f-4125-a424-bd1d21b6ecf2} AggregatingTransform: Aggregated. 1 to 1 rows (from 17.00 B) in 0.024679052 sec. (40.520 rows/sec., 688.84 B/sec.)
+AGGREGATION_MATCHER_RE = re.compile(
+    r"AggregatingTransform: Aggregated. (?P<before_row_count>\d+) to (?P<after_row_count>\d+) rows \(from (?P<memory_size>.*)\) in (?P<seconds>[0-9.]+) sec. \((?P<rows_per_second>[0-9.]+) rows/sec., (?P<bytes_per_second>.+)/sec.\)"
+)
+
+
+@dataclass
+class AggregationSummary:
+    transform: str
+    before_row_count: int
+    after_row_count: int
+    memory_size: str
+    seconds: float
+    rows_per_second: float
+    bytes_per_second: str
+
+    @staticmethod
+    def from_log(log_line: str) -> AggregationSummary | None:
+        match = AGGREGATION_MATCHER_RE.match(log_line)
+        if not match:
+            return None
+
+        return AggregationSummary(
+            transform="AggregatingTransform",
+            before_row_count=int(match.group("before_row_count")),
+            after_row_count=int(match.group("after_row_count")),
+            memory_size=match.group("memory_size"),
+            seconds=float(match.group("seconds")),
+            rows_per_second=float(match.group("rows_per_second")),
+            bytes_per_second=match.group("bytes_per_second"),
+        )
+
+
+# [ spans-clickhouse-1 ] [ 23107 ] {0.21246445055947638} MergingSortedTransform: Merge sorted 2 blocks, 1458 rows in 0.464030493 sec., 3142.034892090594 rows/sec., 61.37 KiB/sec
+SORTING_MATCHER_RE = re.compile(
+    r"(?P<transform>Merging.*Transform): Merge sorted (?P<sorted_blocks>\d+) blocks, (?P<rows>\d+) rows in (?P<seconds>[0-9.]+) sec., (?P<rows_per_second>[0-9.]+) rows/sec., (?P<bytes_per_second>.+)/sec"
+)
+
+
+@dataclass
+class SortingSummary:
+    transform: str
+    sorted_blocks: int
+    rows: int
+    seconds: float
+    rows_per_second: float
+    bytes_per_second: str
+
+    @staticmethod
+    def from_log(log_line: str) -> SortingSummary | None:
+        match = SORTING_MATCHER_RE.match(log_line)
+        if not match:
+            return None
+
+        return SortingSummary(
+            transform=match.group("transform"),
+            sorted_blocks=int(match.group("sorted_blocks")),
+            rows=int(match.group("rows")),
+            seconds=float(match.group("seconds")),
+            rows_per_second=float(match.group("rows_per_second")),
+            bytes_per_second=match.group("bytes_per_second"),
+        )
+
+
+# [ spans-clickhouse-1 ] [ 65011 ] {0.21246445055947638} executeQuery: Read 8528171 rows, 886.74 MiB in 0.472519 sec., 18048313.40115424 rows/sec., 1.83 GiB/sec.
+EXECUTE_MATCHER_RE = re.compile(
+    r"executeQuery: Read (?P<rows_read>\d+) rows, (?P<memory_size>.+) in (?P<seconds>[0-9.]+) sec., (?P<rows_per_second>[0-9.]+) rows/sec., (?P<bytes_per_second>.+)/sec."
+)
+
+
+@dataclass
+class ExecuteSummary:
+    rows_read: int
+    memory_size: str
+    seconds: float
+    rows_per_second: float
+    bytes_per_second: str
+
+    @staticmethod
+    def from_log(log_line: str) -> ExecuteSummary | None:
+        match = EXECUTE_MATCHER_RE.match(log_line)
+        if not match:
+            return None
+
+        return ExecuteSummary(
+            rows_read=int(match.group("rows_read")),
+            memory_size=match.group("memory_size"),
+            seconds=float(match.group("seconds")),
+            rows_per_second=float(match.group("rows_per_second")),
+            bytes_per_second=match.group("bytes_per_second"),
+        )
+
+
+@dataclass
+class QuerySummary:
+    node_name: str
+    is_distributed: bool
+    query_id: str
+    execute_summaries: list[ExecuteSummary] | None = None
+    select_summaries: list[SelectSummary] | None = None
+    index_summaries: list[IndexSummary] | None = None
+    stream_summaries: list[StreamSummary] | None = None
+    aggregation_summaries: list[AggregationSummary] | None = None
+    sorting_summaries: list[SortingSummary] | None = None
+
+
+@dataclass
+class TracingSummary:
+    query_summaries: dict[str, QuerySummary]
+
+
+line_types = [
+    IndexSummary,
+    SelectSummary,
+    StreamSummary,
+    AggregationSummary,
+    SortingSummary,
+    ExecuteSummary,
+]
+
+
+def summarize_trace_output(raw_trace_logs: str) -> TracingSummary:
+    parsed = format_log_to_dict(raw_trace_logs)
+
+    summary = TracingSummary({})
+    query_node = parsed[0]["node_name"]
+    summary.query_summaries[query_node] = QuerySummary(
+        query_node, True, parsed[0]["query_id"]
+    )
+    for line in parsed:
+        if line["node_name"] not in summary.query_summaries:
+            summary.query_summaries[line["node_name"]] = QuerySummary(
+                line["node_name"], False, line["query_id"]
+            )
+
+        query_summary = summary.query_summaries[line["node_name"]]
+        for line_type in line_types:
+            parsed_line = line_type.from_log(line["log_content"])  # type: ignore
+            if parsed_line is not None:
+                attr_name = (
+                    line_type.__name__.lower().replace("summary", "") + "_summaries"
+                )
+                if getattr(query_summary, attr_name) is None:
+                    setattr(query_summary, attr_name, [parsed_line])
+                else:
+                    getattr(query_summary, attr_name).append(parsed_line)
+
+    return summary
+
+
+square_re = re.compile(r"\[.*?\]")
+angle_re = re.compile(r"<.*?>")
+curly_re = re.compile(r"{.*?}")
+
+
+def format_log_to_dict(raw_trace_logs: str) -> list[dict[str, Any]]:
+    # CLICKHOUSE TRACING LOG STRUCTURE: '[ NODE ] [ THREAD_ID ] {QUERY_ID} <LOG_TYPE> LOG_LINE'
+    formatted_logs = []
+    for line in raw_trace_logs.splitlines():
+        context = square_re.findall(line)
+        log_type = ""
+        log_type_regex = angle_re.search(line)
+        if log_type_regex is not None:
+            log_type = log_type_regex.group()
+
+        query_id = ""
+        query_id_regex = curly_re.search(line)
+        if query_id_regex is not None:
+            query_id = query_id_regex.group()
+
+        try:
+            formatted_log = {
+                "node_name": context[0][2:-2],
+                "thread_id": context[1][2:-2],
+                "query_id": query_id[1:-1],
+                "log_type": log_type,
+                "log_content": angle_re.split(line)[1].strip(),
+            }
+            formatted_logs.append(formatted_log)
+        except Exception:
+            # Error parsing log line, continue.
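+            # A line that doesn't match the expected bracket structure carries
+            # no usable summary data, so it is skipped rather than failing the
+            # whole trace parse.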
+            pass
+
+    return formatted_logs
diff --git a/snuba/admin/clickhouse/tracing.py b/snuba/admin/clickhouse/tracing.py
index 2079b44dfb..8de64c1984 100644
--- a/snuba/admin/clickhouse/tracing.py
+++ b/snuba/admin/clickhouse/tracing.py
@@ -1,63 +1,26 @@
 from __future__ import annotations
 
-import re
 from dataclasses import dataclass
-from typing import Any, Dict, List
 
 from snuba.admin.clickhouse.common import (
     get_ro_query_node_connection,
     validate_ro_query,
 )
+from snuba.admin.clickhouse.trace_log_parsing import (
+    TracingSummary,
+    summarize_trace_output,
+)
 from snuba.clusters.cluster import ClickhouseClientSettings
 
 
 @dataclass
 class TraceOutput:
     trace_output: str
-    formatted_trace_output: Dict[str, Any]
+    summarized_trace_output: TracingSummary
     cols: list[tuple[str, str]]
     num_rows_result: int
 
 
-@dataclass
-class FormattedTrace:
-    query_node: QueryNodeTraceResult
-    storage_nodes: List[StorageNodeTraceResult]
-
-
-class NodeTraceResult:
-    def __init__(self, node_name: str) -> None:
-        self.node_name: str = node_name
-        self.thread_ids: List[str] = []
-        self.threads_used: int = 0
-
-
-class QueryNodeTraceResult(NodeTraceResult):
-    def __init__(self, node_name: str) -> None:
-        super(QueryNodeTraceResult, self).__init__(node_name)
-        self.node_type = "query"
-        self.number_of_storage_nodes_accessed: int = 0
-        self.storage_nodes_accessed: List[str] = []
-        self.aggregation_performance: List[str] = []
-        self.read_performance: List[str] = []
-        self.memory_performance: List[str] = []
-
-
-@dataclass
-class StorageNodeTraceResult(NodeTraceResult):
-    def __init__(self, node_name: str) -> None:
-        super(StorageNodeTraceResult, self).__init__(node_name)
-        self.node_type = "storage"
-        self.key_conditions: List[str] = []
-        self.skip_indexes: List[str] = []
-        self.filtering_algorithm: List[str] = []
-        self.selected_parts_and_marks: List[str] = []
-        self.aggregation_method: List[str] = []
-        self.aggregation_performance: List[str] = []
-        self.read_performance: List[str] = []
-        self.memory_performance: List[str] = []
-
-
 def run_query_and_get_trace(storage_name: str, query: str) -> TraceOutput:
     validate_ro_query(query)
     connection = get_ro_query_node_connection(
@@ -66,115 +29,10 @@ def run_query_and_get_trace(storage_name: str, query: str) -> TraceOutput:
     query_result = connection.execute(
         query=query, capture_trace=True, with_column_types=True
     )
-    formatted_trace_output = format_trace_output(query_result.trace_output)
+    summarized_trace_output = summarize_trace_output(query_result.trace_output)
     return TraceOutput(
         trace_output=query_result.trace_output,
-        formatted_trace_output=formatted_trace_output,
+        summarized_trace_output=summarized_trace_output,
         cols=query_result.meta,  # type: ignore
         num_rows_result=len(query_result.results),
     )
-
-
-LOG_MAPPINGS_FOR_QUERY_NODES = [
-    ("", ["Aggregator: Merging"], "aggregation_performance"),
-    ("", ["Aggregator"], "aggregation_performance"),
-    ("", ["executeQuery"], "read_performance"),
-    ("", ["MemoryTracker"], "memory_performance"),
-]
-
-
-LOG_MAPPINGS_FOR_STORAGE_NODES = [
-    ("", ["Key condition"], "key_conditions"),
-    ("", ["index condition"], "skip_indexes"),
-    (
-        "",
-        ["binary search", "generic exclusion"],
-        "filtering_algorithm",
-    ),  # TODO: make this generic
-    ("", ["granules"], "selected_parts_and_marks"),
-    ("", ["partition key"], "selected_parts_and_marks"),
-    ("", ["Aggregation method"], "aggregation_method"),
-    ("", ["AggregatingTransform"], "aggregation_performance"),
-    ("", ["executeQuery"], "read_performance"),
-    ("", ["MemoryTracker"], "memory_performance"),
-]
-
-
-def format_trace_output(raw_trace_logs: str) -> Dict[str, Any]:
-    formatted_logs = format_log_to_dict(raw_trace_logs)
-
-    result: dict[str, Any] = {}  # node_name: NodeTraceResult
-
-    query_node_name = formatted_logs[0]["node_name"]
-    result[query_node_name] = QueryNodeTraceResult(query_node_name)
-    query_node_trace_result = result[query_node_name]
-    assert isinstance(query_node_trace_result, QueryNodeTraceResult)
-
-    for log in formatted_logs:
-        node_name = log["node_name"]
-        if node_name not in result:
-            result[node_name] = StorageNodeTraceResult(node_name)
-            query_node_trace_result.storage_nodes_accessed.append(node_name)
-            query_node_trace_result.number_of_storage_nodes_accessed += 1
-
-        trace_result = result[node_name]
-        assert isinstance(trace_result, NodeTraceResult)
-        if log["thread_id"] not in trace_result.thread_ids:
-            trace_result.thread_ids.append(log["thread_id"])
-            trace_result.threads_used = len(trace_result.thread_ids)
-
-        if node_name == query_node_name:
-            assert isinstance(trace_result, QueryNodeTraceResult)
-            for log_type, search_strs, trace_attr in LOG_MAPPINGS_FOR_QUERY_NODES:
-                find_and_add_log_line(
-                    log, getattr(trace_result, trace_attr), log_type, search_strs
-                )
-        else:
-            assert isinstance(trace_result, StorageNodeTraceResult)
-            for (
-                log_type,
-                search_strs,
-                trace_attr,
-            ) in LOG_MAPPINGS_FOR_STORAGE_NODES:
-                find_and_add_log_line(
-                    log, getattr(trace_result, trace_attr), log_type, search_strs
-                )
-    for key, value in result.items():
-        result[key] = vars(value)
-    return result
-
-
-def find_and_add_log_line(
-    log: Dict[str, Any],
-    trace_result_data: List[str],
-    log_type: str,
-    search_strs: List[str],
-) -> None:
-    for search_str in search_strs:
-        if log["log_type"] == log_type and search_str in log["log_content"]:
-            trace_result_data.append(log["log_content"])
-
-
-def format_log_to_dict(raw_trace_logs: str) -> List[dict[str, Any]]:
-    # CLICKHOUSE TRACING LOG STRUCTURE: '[ NODE ] [ THREAD_ID ] {QUERY_ID} <LOG_TYPE> LOG_LINE'
-    formatted_logs = []
-    for line in raw_trace_logs.splitlines():
-        context = re.findall(r"\[.*?\]", line)
-        log_type = ""
-        log_type_regex = re.search(r"<.*?>", line)
-        if log_type_regex is not None:
-            log_type = log_type_regex.group()
-
-        try:
-            formatted_log = {
-                "node_name": context[0][2:-2],
-                "thread_id": context[1][2:-2],
-                "log_type": log_type,
-                "log_content": re.split(r"<.*?>", line)[1].strip(),
-            }
-        except Exception:
-            # Error parsing log line, continue.
-            pass
-
-        formatted_logs.append(formatted_log)
-    return formatted_logs
diff --git a/snuba/admin/static/cardinality_analyzer/query_display.tsx b/snuba/admin/static/cardinality_analyzer/query_display.tsx
index b5be9c6ed0..861d8d3c25 100644
--- a/snuba/admin/static/cardinality_analyzer/query_display.tsx
+++ b/snuba/admin/static/cardinality_analyzer/query_display.tsx
@@ -3,8 +3,13 @@ import Client from "SnubaAdmin/api_client";
 import { Collapse } from "SnubaAdmin/collapse";
 import { CSV } from "SnubaAdmin/cardinality_analyzer/CSV";
 import QueryEditor from "SnubaAdmin/query_editor";
+import ExecuteButton from "SnubaAdmin/utils/execute_button";
 
-import { CardinalityQueryRequest, CardinalityQueryResult, PredefinedQuery } from "./types";
+import {
+  CardinalityQueryRequest,
+  CardinalityQueryResult,
+  PredefinedQuery,
+} from "SnubaAdmin/cardinality_analyzer/types";
 
 enum ClipboardFormats {
   CSV = "csv",
@@ -18,7 +23,9 @@ function QueryDisplay(props: {
   predefinedQueryOptions: Array<PredefinedQuery>;
 }) {
   const [query, setQuery] = useState<QueryState>({});
-  const [queryResultHistory, setCardinalityQueryResultHistory] = useState<CardinalityQueryResult[]>([]);
+  const [queryResultHistory, setCardinalityQueryResultHistory] = useState<
+    CardinalityQueryResult[]
+  >([]);
 
   function updateQuerySql(sql: string) {
     setQuery((prevQuery) => {
@@ -29,25 +36,16 @@ function QueryDisplay(props: {
     });
   }
 
-  function executeQuery() {
-    props.api
-      .executeCardinalityQuery(query as CardinalityQueryRequest)
-      .then((result) => {
-        result.input_query = query.sql || "";
-        setCardinalityQueryResultHistory((prevHistory) => [result, ...prevHistory]);
-      })
-      .catch((err) => {
-        console.log("ERROR", err);
-        window.alert("An error occurred: " + err.error.message);
-      });
-  }
-
   function convertResultsToCSV(queryResult: CardinalityQueryResult) {
     return CSV.sheet([queryResult.column_names, ...queryResult.rows]);
   }
 
-  function copyText(queryResult: CardinalityQueryResult, format: ClipboardFormats) {
-    let formatter: (input: CardinalityQueryResult) => string = (s) => s.toString();
+  function copyText(
+    queryResult: CardinalityQueryResult,
+    format: ClipboardFormats
+  ) {
+    let formatter: (input: CardinalityQueryResult) => string = (s) =>
+      s.toString();
 
     if (format === ClipboardFormats.JSON) {
       formatter = JSON.stringify;
@@ -60,6 +58,18 @@ function QueryDisplay(props: {
     window.navigator.clipboard.writeText(formatter(queryResult));
   }
 
+  function executeQuery() {
+    return props.api
+      .executeCardinalityQuery(query as CardinalityQueryRequest)
+      .then((result) => {
+        result.input_query = query.sql || "";
+        setCardinalityQueryResultHistory((prevHistory) => [
+          result,
+          ...prevHistory,
+        ]);
+      });
+  }
+
   return (
     <div>
       <form>
         <h2>Construct a Metrics Query</h2>
 
@@ -70,18 +80,7 @@ function QueryDisplay(props: {
           predefinedQueryOptions={props.predefinedQueryOptions}
         />
-        <div style={executeActionsStyle}>
-          <button
-            onClick={(evt) => {
-              evt.preventDefault();
-              executeQuery();
-            }}
-            style={executeButtonStyle}
-            disabled={!query.sql}
-          >
-            Execute query
-          </button>
-        </div>
+        <ExecuteButton onClick={executeQuery} disabled={!query.sql} />
       </form>
       <div>
         <h2>Query results</h2>
@@ -91,12 +90,18 @@ function QueryDisplay(props: {
           <div key={idx}>
             <p>{queryResult.input_query}</p>
-            <button
-              style={executeButtonStyle}
-              onClick={() => copyText(queryResult, ClipboardFormats.CSV)}
-            />
-            <button
-              style={executeButtonStyle}
-              onClick={() => copyText(queryResult, ClipboardFormats.JSON)}
-            />
+            <button
+              style={copyButtonStyle}
+              onClick={() => copyText(queryResult, ClipboardFormats.CSV)}
+            />
+            <button
+              style={copyButtonStyle}
+              onClick={() => copyText(queryResult, ClipboardFormats.JSON)}
+            />
           </div>
@@ -107,10 +112,16 @@ function QueryDisplay(props: {
         return (
-          <Collapse key={idx} text={queryResult.input_query}>
-            {props.resultDataPopulator(queryResult)}
+          <Collapse
+            key={idx}
+            text={queryResult.input_query}
+          >
+            {props.resultDataPopulator(queryResult)}
           </Collapse>
         );
@@ -128,7 +139,7 @@ const executeActionsStyle = {
   marginTop: 8,
 };
 
-const executeButtonStyle = {
+const copyButtonStyle = {
   height: 30,
   border: 0,
   padding: "4px 20px",
diff --git a/snuba/admin/static/clickhouse_queries/query_display.tsx b/snuba/admin/static/clickhouse_queries/query_display.tsx
index df1802836c..e3136a55c5 100644
--- a/snuba/admin/static/clickhouse_queries/query_display.tsx
+++ b/snuba/admin/static/clickhouse_queries/query_display.tsx
@@ -2,6 +2,7 @@ import React, { useEffect, useState } from "react";
 import Client from "SnubaAdmin/api_client";
 import { Collapse } from "SnubaAdmin/collapse";
 import QueryEditor from "SnubaAdmin/query_editor";
+import ExecuteButton from "SnubaAdmin/utils/execute_button";
 
 import { Prism } from "@mantine/prism";
 import { RichTextEditor } from "@mantine/tiptap";
@@ -71,15 +72,11 @@ function QueryDisplay(props: {
   }
 
   function executeQuery() {
-    props.api
+    return props.api
       .executeSystemQuery(query as QueryRequest)
       .then((result) => {
         result.input_query = `${query.sql} (${query.storage},${query.host}:${query.port})`;
         setQueryResultHistory((prevHistory) => [result, ...prevHistory]);
-      })
-      .catch((err) => {
-        console.log("ERROR", err);
-        window.alert("An error occurred: " + err.error);
       });
   }
 
@@ -167,18 +164,12 @@ function QueryDisplay(props: {
-          <div>
-            <button
-              onClick={(evt) => {
-                evt.preventDefault();
-                executeQuery();
-              }}
-              style={executeButtonStyle}
-              disabled={!query.storage || !query.host || !query.sql}
-            >
-              Execute query
-            </button>
-          </div>
+          <ExecuteButton
+            onClick={executeQuery}
+            disabled={!query.storage || !query.host || !query.sql}
+          />
diff --git a/snuba/admin/static/production_queries/index.tsx b/snuba/admin/static/production_queries/index.tsx
index cedcb27eab..199cdd022c 100644
--- a/snuba/admin/static/production_queries/index.tsx
+++ b/snuba/admin/static/production_queries/index.tsx
@@ -1,7 +1,12 @@
 import React, { useEffect, useState } from "react";
 import Client from "SnubaAdmin/api_client";
 import { Table } from "SnubaAdmin/table";
-import { QueryResult, QueryResultColumnMeta, SnQLRequest } from "SnubaAdmin/production_queries/types";
+import {
+  QueryResult,
+  QueryResultColumnMeta,
+  SnQLRequest,
+} from "SnubaAdmin/production_queries/types";
+import ExecuteButton from "SnubaAdmin/utils/execute_button";
 import { executeActionsStyle } from "SnubaAdmin/production_queries/styles";
 import {
   Accordion,
@@ -16,7 +21,7 @@ import {
   Textarea,
 } from "@mantine/core";
 import { useDisclosure } from "@mantine/hooks";
-import { CSV } from "../cardinality_analyzer/CSV";
+import { CSV } from "SnubaAdmin/cardinality_analyzer/CSV";
 
 function ProductionQueries(props: { api: Client }) {
   const [datasets, setDatasets] = useState<string[]>([]);
@@ -25,7 +30,6 @@ function ProductionQueries(props: { api: Client }) {
   const [queryResultHistory, setQueryResultHistory] = useState<QueryResult[]>(
     []
   );
-  const [isExecuting, setIsExecuting] = useState<boolean>(false);
 
   useEffect(() => {
     props.api.getSnubaDatasetNames().then((res) => {
@@ -58,11 +62,7 @@ function ProductionQueries(props: { api: Client }) {
   }
 
   function executeQuery() {
-    if (isExecuting) {
-      window.alert("A query is already running");
-    }
-    setIsExecuting(true);
-    props.api
+    return props.api
       .executeSnQLQuery(snql_query as SnQLRequest)
       .then((result) => {
         const result_columns = result.meta.map(
@@ -80,13 +80,6 @@ function ProductionQueries(props: { api: Client }) {
         quota_allowance: result.quota_allowance,
       };
       setQueryResultHistory((prevHistory) => [query_result, ...prevHistory]);
-      })
-      .catch((err) => {
-        console.log("ERROR", err);
-        window.alert("An error occurred: " + err.message);
-      })
-      .finally(() => {
-        setIsExecuting(false);
       });
   }
 
@@ -116,15 +109,12 @@ function ProductionQueries(props: { api: Client }) {
           />
-          <Button
-            onClick={(evt) => {
-              evt.preventDefault();
-              executeQuery();
-            }}
-            loading={isExecuting}
-            disabled={!snql_query.dataset || !snql_query.query}
-          >
-            Execute Query
-          </Button>
+          <ExecuteButton
+            onClick={executeQuery}
+            disabled={!snql_query.dataset || !snql_query.query}
+          />
@@ -225,9 +215,9 @@ function renderThrottleStatus(isThrottled: boolean, reasonHeader: string[]) {
     >
       Quota Allowance - Throttled
       <ol>
-      {reasonHeader.map((line, index) => (
+        {reasonHeader.map((line, index) => (
           <li key={index}>{line}</li>
-      ))}
+        ))}
       </ol>
   ) : (
@@ -261,8 +251,9 @@ function QueryResultQuotaAllowance(props: { queryResult: QueryResult }) {
   const isThrottled: boolean =
     (props.queryResult.quota_allowance &&
       Object.values(props.queryResult.quota_allowance).some(
-        (policy) => policy.max_threads < 10,
-      )) || false;
+        (policy) => policy.max_threads < 10
+      )) ||
+    false;
   let reasonHeader: string[] = [];
   if (isThrottled) {
     props.queryResult.quota_allowance &&
@@ -270,8 +261,12 @@ function QueryResultQuotaAllowance(props: { queryResult: QueryResult }) {
       const policy = props.queryResult.quota_allowance![policyName];
       if (policy.max_threads < 10 && policy.explanation.reason != null) {
         reasonHeader.push(
-          policyName + ": " + policy.explanation.reason +
-            ". SnQL Query executed with " + policy.max_threads + " threads.",
+          policyName +
+            ": " +
+            policy.explanation.reason +
+            ". SnQL Query executed with " +
+            policy.max_threads +
+            " threads."
         );
       }
     });
@@ -282,9 +277,7 @@ function QueryResultQuotaAllowance(props: { queryResult: QueryResult }) {
       {renderThrottleStatus(isThrottled, reasonHeader)}
-      <Accordion.Panel>
-        {renderPolicyDetails(props)}
-      </Accordion.Panel>
+      <Accordion.Panel>{renderPolicyDetails(props)}</Accordion.Panel>
     </Accordion.Item>
   );
diff --git a/snuba/admin/static/querylog/query_display.tsx b/snuba/admin/static/querylog/query_display.tsx
index 12b304465b..f1a45dd195 100644
--- a/snuba/admin/static/querylog/query_display.tsx
+++ b/snuba/admin/static/querylog/query_display.tsx
@@ -1,9 +1,11 @@
 import React, { useState } from "react";
+import { Button } from "@mantine/core";
 
 import Client from "SnubaAdmin/api_client";
 import { Collapse } from "SnubaAdmin/collapse";
 import QueryEditor from "SnubaAdmin/query_editor";
 
 import { QuerylogRequest, QuerylogResult, PredefinedQuery } from "./types";
+import ExecuteButton from "SnubaAdmin/utils/execute_button";
 
 type QueryState = Partial<QuerylogRequest>;
 
@@ -27,15 +29,11 @@ function QueryDisplay(props: {
   }
 
   function executeQuery() {
-    props.api
+    return props.api
       .executeQuerylogQuery(query as QuerylogRequest)
      .then((result) => {
         result.input_query = query.sql || "";
         setQueryResultHistory((prevHistory) => [result, ...prevHistory]);
-      })
-      .catch((err) => {
-        console.log("ERROR", err);
-        window.alert("An error occurred: " + err.error.message);
       });
   }
 
@@ -79,25 +77,15 @@ function QueryDisplay(props: {
         />
-        <div style={executeActionsStyle}>
-          <button
-            onClick={(evt) => {
-              evt.preventDefault();
-              executeQuery();
-            }}
-            style={executeButtonStyle}
-            disabled={!query.sql}
-          >
-            Execute query
-          </button>
-        </div>
+        <div style={executeActionsStyle}>
+          <ExecuteButton onClick={executeQuery} disabled={!query.sql} />
+        </div>
diff --git a/snuba/admin/static/snql_to_sql/index.tsx b/snuba/admin/static/snql_to_sql/index.tsx
index a1aea12250..2cc5b06833 100644
--- a/snuba/admin/static/snql_to_sql/index.tsx
+++ b/snuba/admin/static/snql_to_sql/index.tsx
@@ -2,8 +2,13 @@ import React, { useEffect, useState } from "react";
 
 import Client from "SnubaAdmin/api_client";
 import { Table } from "SnubaAdmin/table";
-import { executeActionsStyle, selectStyle, executeButtonStyle } from "SnubaAdmin/snql_to_sql/styles";
+import {
+  executeActionsStyle,
+  selectStyle,
+  executeButtonStyle,
+} from "SnubaAdmin/snql_to_sql/styles";
 import { TextArea } from "SnubaAdmin/snql_to_sql/utils";
+import ExecuteButton from "SnubaAdmin/utils/execute_button";
 import {
   SnQLRequest,
   SnQLResult,
@@ -17,7 +22,6 @@ function SnQLToSQL(props: { api: Client }) {
   const [queryResultHistory, setQueryResultHistory] = useState<SnQLResult[]>(
     []
   );
-  const [isExecuting, setIsExecuting] = useState<boolean>(false);
 
   useEffect(() => {
     props.api.getSnubaDatasetNames().then((res) => {
@@ -44,11 +48,7 @@ function SnQLToSQL(props: { api: Client }) {
   }
 
   function convertQuery() {
-    if (isExecuting) {
-      window.alert("A query is already running");
-    }
-    setIsExecuting(true);
-    props.api
+    return props.api
       .debugSnQLQuery(snql_query as SnQLRequest)
       .then((result) => {
         const query_result = {
@@ -56,13 +56,6 @@ function SnQLToSQL(props: { api: Client }) {
           sql: result.sql,
         };
         setQueryResultHistory((prevHistory) => [query_result, ...prevHistory]);
-      })
-      .catch((err) => {
-        console.log("ERROR", err);
-        window.alert("An error occurred: " + err.message);
-      })
-      .finally(() => {
-        setIsExecuting(false);
       });
   }
 
@@ -91,17 +84,13 @@ function SnQLToSQL(props: { api: Client }) {
-        <button
-          onClick={(evt) => {
-            evt.preventDefault();
-            convertQuery();
-          }}
-          style={executeButtonStyle}
-          disabled={isExecuting || !snql_query.dataset || !snql_query.query}
-        >
-          Convert Query
-        </button>
+        <ExecuteButton
+          onClick={convertQuery}
+          disabled={!snql_query.dataset || !snql_query.query}
+          label="Convert Query"
+        />
diff --git a/snuba/admin/static/snuba_explain/index.tsx b/snuba/admin/static/snuba_explain/index.tsx
index cc4d254cb7..e9d8e3e0cd 100644
--- a/snuba/admin/static/snuba_explain/index.tsx
+++ b/snuba/admin/static/snuba_explain/index.tsx
@@ -4,13 +4,18 @@ import { Prism } from "@mantine/prism";
 
 import Client from "SnubaAdmin/api_client";
 import QueryEditor from "SnubaAdmin/query_editor";
 import { Collapse } from "SnubaAdmin/collapse";
-import { SnQLRequest, SnQLResult, ExplainResult, ExplainStep } from "SnubaAdmin/snuba_explain/types";
+import {
+  SnQLRequest,
+  SnQLResult,
+  ExplainResult,
+  ExplainStep,
+} from "SnubaAdmin/snuba_explain/types";
 import { Step } from "SnubaAdmin/snuba_explain/step_render";
+import ExecuteButton from "SnubaAdmin/utils/execute_button";
 
 import {
   executeActionsStyle,
   selectStyle,
-  executeButtonStyle,
   collapsibleStyle,
 } from "SnubaAdmin/snuba_explain/styles";
 import { SnubaDatasetName, SnQLQueryState } from "SnubaAdmin/snql_to_sql/types";
@@ -21,7 +26,6 @@ function SnubaExplain(props: { api: Client }) {
   const [queryResultHistory, setQueryResultHistory] = useState<SnQLResult[]>(
     []
   );
-  const [isExecuting, setIsExecuting] = useState<boolean>(false);
 
   useEffect(() => {
     props.api.getSnubaDatasetNames().then((res) => {
@@ -48,11 +52,7 @@ function SnubaExplain(props: { api: Client }) {
   }
 
   function explainQuery() {
-    if (isExecuting) {
-      window.alert("A query is already running");
-    }
-    setIsExecuting(true);
-    props.api
+    return props.api
       .debugSnQLQuery(snql_query as SnQLRequest)
       .then((result) => {
         const query_result = {
@@ -61,13 +61,6 @@ function SnubaExplain(props: { api: Client }) {
           explain: result.explain as ExplainResult,
         };
         setQueryResultHistory((prevHistory) => [query_result, ...prevHistory]);
-      })
-      .catch((err) => {
-        console.log("ERROR", err);
-        window.alert("An error occurred: " + err.message);
-      })
-      .finally(() => {
-        setIsExecuting(false);
       });
   }
 
@@ -120,20 +113,13 @@ function SnubaExplain(props: { api: Client }) {
-        <button
-          onClick={(evt) => {
-            evt.preventDefault();
-            explainQuery();
-          }}
-          style={executeButtonStyle}
-          disabled={isExecuting || !snql_query.dataset || !snql_query.query}
-        >
-          Explain Query
-        </button>
+        <ExecuteButton
+          onClick={explainQuery}
+          disabled={!snql_query.dataset || !snql_query.query}
+          label="Explain Query"
+        />
diff --git a/snuba/admin/static/tests/tracing/index.spec.tsx b/snuba/admin/static/tests/tracing/index.spec.tsx
index fae7e8a3d2..d876b546cb 100644
--- a/snuba/admin/static/tests/tracing/index.spec.tsx
+++ b/snuba/admin/static/tests/tracing/index.spec.tsx
@@ -46,7 +46,7 @@ it("select executor rows should appear", async () => {
     target: { value: "Foo" },
   });
 
-  const submitButton = screen.getByText("Execute query");
+  const submitButton = screen.getByText("Execute Query");
   expect(submitButton.getAttribute("disabled")).toBeFalsy();
   fireEvent.click(submitButton);
 
diff --git a/snuba/admin/static/tests/utils/execute_button.spec.tsx b/snuba/admin/static/tests/utils/execute_button.spec.tsx
new file mode 100644
index 0000000000..c1ec9094ce
--- /dev/null
+++ b/snuba/admin/static/tests/utils/execute_button.spec.tsx
@@ -0,0 +1,51 @@
+import Nav from "SnubaAdmin/nav";
+import Client from "SnubaAdmin/api_client";
+import React from "react";
+import { it, expect, jest, afterEach } from "@jest/globals";
+import {
+  render,
+  act,
+  waitFor,
+  screen,
+  fireEvent,
+} from "@testing-library/react";
+import { AllowedTools } from "SnubaAdmin/types";
+import ExecuteButton from "SnubaAdmin/utils/execute_button";
+
+it("should call onClick", async () => {
+  let mockCall = jest.fn<() => Promise<any>>().mockResolvedValueOnce({});
+
+  render(<ExecuteButton onClick={mockCall} />);
+
+  const button = screen.getByRole("button");
+  fireEvent.click(button);
+
+  await waitFor(() => expect(mockCall).toBeCalledTimes(1));
+});
+
+it("should not call if disabled", async () => {
+  let mockCall = jest.fn(
+    () => new Promise((resolve) => setTimeout(resolve, 1000))
+  );
+
+  render(<ExecuteButton disabled={true} onClick={mockCall} />);
+
+  const button = screen.getByRole("button");
+  fireEvent.click(button);
+
+  await waitFor(() => expect(mockCall).toBeCalledTimes(0));
+});
+
+it("should not call if loading", async () => {
+  let mockCall = jest.fn(
+    () => new Promise((resolve) => setTimeout(resolve, 1000))
+  );
+
+  render(<ExecuteButton onClick={mockCall} />);
+
+  const button = screen.getByRole("button");
+  fireEvent.click(button);
+  fireEvent.click(button);
+
+  await waitFor(() => expect(mockCall).toBeCalledTimes(1));
+});
diff --git a/snuba/admin/static/tracing/index.tsx b/snuba/admin/static/tracing/index.tsx
index 0e154e18ac..b46adcd290 100644
--- a/snuba/admin/static/tracing/index.tsx
+++ b/snuba/admin/static/tracing/index.tsx
@@ -1,15 +1,20 @@
-import React, { useEffect, useState } from "react";
+import React, { useState } from "react";
+import { Accordion, Stack, Title, Text, Group } from "@mantine/core";
+
 import Client from "SnubaAdmin/api_client";
 import QueryDisplay from "SnubaAdmin/tracing/query_display";
 import {
   LogLine,
-  TracingRequest,
   TracingResult,
-  PredefinedQuery,
+  TracingSummary,
+  QuerySummary,
+  ExecuteSummary,
+  SelectSummary,
+  IndexSummary,
+  StreamSummary,
+  AggregationSummary,
+  SortingSummary,
 } from "SnubaAdmin/tracing/types";
-import { parseLogLine } from "SnubaAdmin/tracing/util";
-
-type QueryState = Partial<TracingRequest>;
 
 type BucketedLogs = Map<String, Map<MessageCategory, LogLine[]>>;
 
@@ -50,125 +55,7 @@ function getMessageCategory(logLine: LogLine): MessageCategory {
   }
 }
 
-function NodalDisplay(props: {
-  host: string;
-  category: MessageCategory;
-  title?: string;
-  logsBucketed: BucketedLogs;
-}) {
-  const [visible, setVisible] = useState<boolean>(false);
-
-  const nodeKey = props.host + "-" + props.category;
-  return (
-    <li key={nodeKey}>
-      <span onClick={() => setVisible(!visible)}>
-        {visible ? "[-]" : "[+]"} {props.title}
-      </span>
-
-      <ol>
-        {visible &&
-          props.logsBucketed
-            .get(props.host)
-            ?.get(props.category)
-            ?.map((line, index) => {
-              return (
-                <li key={nodeKey + "-" + index}>
-                  [{line?.log_level}] {line?.component}: {line?.message}
-                </li>
-              );
-            })}
-      </ol>
-    </li>
-  );
-}
-
-function FormattedNodalDisplay(props: {
-  header: string;
-  data: string[] | string | number;
-}) {
-  const [visible, setVisible] = useState<boolean>(false);
-
-  return (
-    <li>
-      <span onClick={() => setVisible(!visible)}>
-        {visible ? "[-]" : "[+]"} {props.header.split("_").join(" ")}
-      </span>
-
-      <ol>
-        {visible &&
-          Array.isArray(props.data) &&
-          props.data.map((log: string, log_idx: number) => {
-            return <li key={log_idx}>{log}</li>;
-          })}
-        {visible &&
-          (typeof props.data === "string" ||
-            typeof props.data === "number") && <li>{props.data}</li>}
-      </ol>
-    </li>
-  );
-}
-
 function TracingQueries(props: { api: Client }) {
-  const [query, setQuery] = useState<QueryState>({});
-  const [queryResultHistory, setQueryResultHistory] = useState<TracingResult[]>(
-    []
-  );
-  const [isExecuting, setIsExecuting] = useState<boolean>(false);
-  const [predefinedQueryOptions, setPredefinedQueryOptions] = useState<
-    PredefinedQuery[]
-  >([]);
-
-  const endpoint = "clickhouse_trace_query";
-  const hidden_formatted_trace_fields = new Set([
-    "thread_ids",
-    "node_name",
-    "node_type",
-    "storage_nodes_accessed",
-  ]);
-
-  function formatSQL(sql: string) {
-    const formatted = sql
-      .split("\n")
-      .map((line) => line.substring(4, line.length))
-      .join("\n");
-    return formatted.trim();
-  }
-
-  function executeQuery() {
-    if (isExecuting) {
-      window.alert("A query is already running");
-    }
-    setIsExecuting(true);
-    props.api
-      .executeTracingQuery(query as TracingRequest)
-      .then((result) => {
-        const tracing_result = {
-          input_query: `${query.sql}`,
-          timestamp: result.timestamp,
-          num_rows_result: result.num_rows_result,
-          cols: result.cols,
-          trace_output: result.trace_output,
-          formatted_trace_output: result.formatted_trace_output,
-          error: result.error,
-        };
-        setQueryResultHistory((prevHistory) => [
-          tracing_result,
-          ...prevHistory,
-        ]);
-      })
-      .catch((err) => {
-        console.log("ERROR", err);
-        window.alert("An error occurred: " + err.error.message);
-      })
-      .finally(() => {
-        setIsExecuting(false);
-      });
-  }
-
-  function copyText(text: string) {
-    window.navigator.clipboard.writeText(text);
-  }
-
   function tablePopulator(queryResult: TracingResult, showFormatted: boolean) {
     var elements = {};
     if (queryResult.error) {
@@ -198,13 +85,13 @@ function TracingQueries(props: { api: Client }) {
       );
     } else if (title === "Trace") {
-      if (!showFormatted) {
+      if (showFormatted) {
        return (
          <div>
            <br />
            Number of rows in result set: {value.num_rows_result}
            <br />
-           {heirarchicalRawTraceDisplay(title, value.trace_output)}
+           {summarizedTraceDisplay(value.summarized_trace_output)}
          </div>
        );
      } else {
        return (
          <div>
            <br />
            Number of rows in result set: {value.num_rows_result}
            <br />
-           {formattedTraceDisplay(title, value.formatted_trace_output)}
+           {rawTraceDisplay(title, value.trace_output)}
          </div>
        );
      }
@@ -223,171 +110,160 @@ function TracingQueries(props: { api: Client }) {
     );
   }
 
-  function heirarchicalRawTraceDisplay(
-    title: string,
-    value: any
-  ): JSX.Element | undefined {
-    /*
-    query execution flow:
-    [high-level query node]
-      [housekeeping] (access control, parsing)
-      [propagation step]
-      [for each storage node]
-        [housekeeping]
-        [select executor + MergeTreeSelectProcessor]
-        [aggregating transform]
-          [memory tracker]
-        [aggregating transform]
-          [memory tracker]
-    */
-    const parsedLines: Array<LogLine> = value
-      .split(/\n/)
-      .map(parseLogLine)
-      .filter((x: LogLine | null) => x != null);
+  function rawTraceDisplay(title: string, value: any): JSX.Element {
+    const parsedLines: Array<string> = value.split(/\n/);
 
-    // logsBucketed maps host -> (category -> logs)
-    const logsBucketed: BucketedLogs = new Map();
+    return (
+      <ol>
+        {parsedLines.map((line, index) => {
+          return (
+            <li key={title + index}>
+              <code>{line}</code>
+            </li>
+          );
+        })}
+      </ol>
+    );
+  }
 
-    const orderedHosts: string[] = [];
-    parsedLines.forEach((line) => {
-      if (!orderedHosts.includes(line.host)) {
-        orderedHosts.push(line.host);
-      }
-      if (logsBucketed.has(line.host)) {
-        const hostLogs = logsBucketed.get(line.host);
-        if (hostLogs?.has(getMessageCategory(line))) {
-          hostLogs.get(getMessageCategory(line))?.push(line);
-        } else {
-          hostLogs?.set(getMessageCategory(line), [line]);
-        }
-      } else {
-        logsBucketed.set(
-          line.host,
-          new Map([
-            [getMessageCategory(line), [line]],
-          ])
-        );
-      }
-    });
+  function indexSummary(value: IndexSummary): JSX.Element {
+    return (
+      <Group>
+        <Text>{value.table_name}:</Text>
+        <Text>
+          Index `{value.index_name}` has dropped {value.dropped_granules}/
+          {value.total_granules} granules.
+        </Text>
+      </Group>
+    );
+  }
 
-    let rootHost = orderedHosts[0];
+  function selectSummary(value: SelectSummary): JSX.Element {
+    return (
+      <Stack>
+        <Group>
+          <Text>{value.table_name}:</Text>
+          <Text>
+            Selected {value.parts_selected_by_partition_key}/{value.total_parts}{" "}
+            parts by partition key
+          </Text>
+        </Group>
+        <Group>
+          <Text>{value.table_name}:</Text>
+          <Text>
+            Primary Key selected {value.parts_selected_by_primary_key} parts,{" "}
+            {value.marks_selected_by_primary_key}/{value.total_marks} marks,{" "}
+            {value.marks_to_read_from_ranges} total marks to process
+          </Text>
+        </Group>
+      </Stack>
+    );
+  }
 
-    const CATEGORIES_ORDERED = [
-      MessageCategory.housekeeping,
-      MessageCategory.select_execution,
-      MessageCategory.aggregation,
-      MessageCategory.memory_tracker,
-    ];
-    const CATEGORY_HEADERS = new Map([
-      [MessageCategory.housekeeping, "Housekeeping"],
-      [MessageCategory.select_execution, "Select execution"],
-      [MessageCategory.aggregation, "Aggregation"],
-      [MessageCategory.memory_tracker, "Memory Tracking"],
-    ]);
+  function streamSummary(value: StreamSummary): JSX.Element {
+    return (
+      <Group>
+        <Text>{value.table_name}:</Text>
+        <Text>Processing granules using {value.streams} threads</Text>
+      </Group>
+    );
+  }
 
+  function aggregationSummary(value: AggregationSummary): JSX.Element {
     return (
-      <ol>
-        <li>Query node - {rootHost}</li>
-        <li>
-          <ul>
-            {CATEGORIES_ORDERED.map((category) => {
-              return (
-                <NodalDisplay
-                  key={rootHost + "-" + category}
-                  host={rootHost}
-                  title={CATEGORY_HEADERS.get(category)}
-                  category={category}
-                  logsBucketed={logsBucketed}
-                />
-              );
-            })}
-          </ul>
-        </li>
-        <li>Storage nodes</li>
-        <li>
-          <ul>
-            {orderedHosts.slice(1).map((host) => {
-              return (
-                <li key={host}>
-                  Storage node - {host}
-                  {CATEGORIES_ORDERED.map((category) => {
-                    return (
-                      <NodalDisplay
-                        key={host + "-" + category}
-                        host={host}
-                        title={CATEGORY_HEADERS.get(category)}
-                        category={category}
-                        logsBucketed={logsBucketed}
-                      />
-                    );
-                  })}
-                </li>
-              );
-            })}
-          </ul>
-        </li>
-      </ol>
-    );
-  }
+      <Text>
+        Aggregated {value.before_row_count} to {value.after_row_count} rows
+        (from {value.memory_size}) in {value.seconds} sec.
+      </Text>
+    );
+  }
+
+  function sortingSummary(value: SortingSummary): JSX.Element {
+    return (
+      <Text>
+        Merge sorted {value.sorted_blocks} blocks, {value.rows} rows in{" "}
+        {value.seconds} sec.
+      </Text>
+    );
+  }
+
+  function executeSummary(value: ExecuteSummary): JSX.Element {
+    return (
+      <Text>
+        Read {value.rows_read} rows, {value.memory_size} in {value.seconds}{" "}
+        sec., {value.rows_per_second} rows/sec., {value.bytes_per_second}/sec.
+      </Text>
+    );
+  }
+
+  function querySummary(value: QuerySummary): JSX.Element {
+    const execute = value.execute_summaries[0];
+    const dist = value.is_distributed ? " (Distributed)" : "";
+    const index_summaries = value.index_summaries
+      ? value.index_summaries.map((s) => indexSummary(s))
+      : null;
+    const select_summaries = value.select_summaries
+      ? value.select_summaries.map((s) => selectSummary(s))
+      : null;
+    const stream_summaries = value.stream_summaries
+      ? value.stream_summaries.map((s) => streamSummary(s))
+      : null;
+    const show_filtering =
+      index_summaries || select_summaries || stream_summaries;
+    const aggregation_summaries = value.aggregation_summaries
+      ? value.aggregation_summaries.map((s) => aggregationSummary(s))
+      : null;
+    const sorting_summaries = value.sorting_summaries
+      ? value.sorting_summaries.map((s) => sortingSummary(s))
+      : null;
+    const show_aggregating = aggregation_summaries || sorting_summaries;
+    return (
+      <Accordion.Item value={value.node_name}>
+        <Accordion.Control>
+          <Title order={4}>
+            {value.node_name} {dist}: {execute.seconds} sec.
+          </Title>
+        </Accordion.Control>
+        <Accordion.Panel>
+          <Stack>
+            {show_filtering ? <Title order={5}>Filtering</Title> : null}
+            {index_summaries}
+            {select_summaries}
+            {stream_summaries}
+            {show_aggregating ? <Title order={5}>Aggregating</Title> : null}
+            {aggregation_summaries}
+            {sorting_summaries}
+            <Title order={5}>Total</Title>
+            {value.execute_summaries.map((e) => executeSummary(e))}
+          </Stack>
+        </Accordion.Panel>
+      </Accordion.Item>
+    );
+  }
 
-  function formattedTraceDisplay(
-    title: string,
-    value: any
+  function summarizedTraceDisplay(
+    value: TracingSummary
   ): JSX.Element | undefined {
-    let node_names = Object.keys(value);
-    let query_node_name = "";
-    for (const node_name of node_names) {
-      if (value[node_name]["node_type"] == "query") {
-        query_node_name = node_name;
+    let dist_node;
+    let nodes = [];
+    for (const [host, summary] of Object.entries(value.query_summaries)) {
+      if (summary.is_distributed) {
+        dist_node = summary;
+      } else {
+        nodes.push(summary);
       }
     }
 
     return (
-      <ol>
-        <li>Query node - {query_node_name}</li>
-        <li>
-          <ul>
-            {Object.keys(value[query_node_name]).map(
-              (header: string, idx: number) => {
-                if (!hidden_formatted_trace_fields.has(header)) {
-                  const data = value[query_node_name][header];
-                  return <FormattedNodalDisplay header={header} data={data} />;
-                }
-              }
-            )}
-          </ul>
-        </li>
-        {node_names.map((node_name, idx) => {
-          if (node_name != query_node_name) {
-            return (
-              <div key={idx}>
-                <br />
-                <ol>
-                  <li>Storage node - {node_name}</li>
-                  <li>
-                    <ul>
-                      {Object.keys(value[node_name]).map(
-                        (header: string, idx: number) => {
-                          if (!hidden_formatted_trace_fields.has(header)) {
-                            const data = value[node_name][header];
-                            return (
-                              <FormattedNodalDisplay
-                                header={header}
-                                data={data}
-                              />
-                            );
-                          }
-                        }
-                      )}
-                    </ul>
-                  </li>
-                </ol>
-              </div>
-            );
-          }
-        })}
-      </ol>
+      <Stack>
+        <Accordion>
+          {querySummary(dist_node as QuerySummary)}
+        </Accordion>
+        <Accordion>
+          {nodes
+            .filter((q: QuerySummary) => !q.is_distributed)
+            .map((q: QuerySummary) => querySummary(q))}
+        </Accordion>
+      </Stack>
     );
   }
 
@@ -402,37 +278,4 @@ function TracingQueries(props: { api: Client }) {
   );
 }
 
-const executeActionsStyle = {
-  display: "flex",
-  justifyContent: "space-between",
-  marginTop: 8,
-};
-
-const executeButtonStyle = {
-  height: 30,
-  border: 0,
-  padding: "4px 20px",
-};
-
-const selectStyle = {
-  marginRight: 8,
-  height: 30,
-};
-
-function TextArea(props: {
-  value: string;
-  onChange: (nextValue: string) => void;
-}) {
-  const { value, onChange } = props;
-  return (