From d9b40f2474d5c79fb2d0f787dd6e82d077b406da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Wed, 4 Dec 2024 17:43:52 -0300 Subject: [PATCH 01/26] Add proof of concept --- replay/src/benchmark.rs | 50 +++++++++++++++++------------------------ replay/src/main.rs | 19 ++++++++++++---- 2 files changed, 35 insertions(+), 34 deletions(-) diff --git a/replay/src/benchmark.rs b/replay/src/benchmark.rs index 4ea93ae..af3362c 100644 --- a/replay/src/benchmark.rs +++ b/replay/src/benchmark.rs @@ -1,9 +1,10 @@ -use std::time::Instant; +use std::{error::Error, io, path::Path}; use blockifier::{ context::BlockContext, execution::contract_class::RunnableCompiledClass, state::{cached_state::CachedState, state_api::StateReader}, + transaction::objects::TransactionExecutionInfo, }; use rpc_state_reader::{ execution::{execute_tx_with_blockifier, fetch_block_context}, @@ -11,7 +12,6 @@ use rpc_state_reader::{ reader::{RpcChain, RpcStateReader}, }; use starknet_api::{block::BlockNumber, hash::StarkHash, transaction::TransactionHash}; -use tracing::{error, info, info_span}; pub type BlockCachedData = ( CachedState>, @@ -58,15 +58,13 @@ pub fn fetch_block_range_data( /// Executes the given block range, discarding any state changes applied to it /// /// Can also be used to fill up the cache -pub fn execute_block_range(block_range_data: &mut Vec) { +pub fn execute_block_range( + block_range_data: &mut Vec, +) -> Vec { + let mut executions = Vec::new(); + for (state, block_context, transactions) in block_range_data { // For each block - let _block_span = info_span!( - "block execution", - block_number = block_context.block_info().block_number.0, - ) - .entered(); - info!("starting block execution"); // The transactional state is used to execute a transaction while discarding state changes applied to it. 
let mut transactional_state = CachedState::create_transactional(state); @@ -77,34 +75,26 @@ pub fn execute_block_range(block_range_data: &mut Vec) { } in transactions { // Execute each transaction - let _tx_span = info_span!("tx execution",).entered(); - - info!("tx execution started"); - - let pre_execution_instant = Instant::now(); - let result = execute_tx_with_blockifier( + let execution = execute_tx_with_blockifier( &mut transactional_state, block_context.clone(), transaction.to_owned(), transaction_hash.to_owned(), ); - let execution_time = pre_execution_instant.elapsed(); - - match result { - Ok(info) => { - info!( - time = ?execution_time, - succeeded = info.revert_error.is_none(), - "tx execution finished" - ) - } - Err(_) => error!( - time = ?execution_time, - "tx execution failed" - ), - } + let Ok(execution) = execution else { continue }; + + executions.push(execution); } } + + executions +} + +pub fn save_executions( + file: &Path, + executions: Vec, +) -> Result<(), Box> { + Ok(()) } pub fn fetch_transaction_data(tx: &str, block: BlockNumber, chain: RpcChain) -> BlockCachedData { diff --git a/replay/src/main.rs b/replay/src/main.rs index 8f0934e..1f68ae0 100644 --- a/replay/src/main.rs +++ b/replay/src/main.rs @@ -17,7 +17,10 @@ use tracing_subscriber::{util::SubscriberInitExt, EnvFilter}; #[cfg(feature = "benchmark")] use { - crate::benchmark::{execute_block_range, fetch_block_range_data, fetch_transaction_data}, + crate::benchmark::{ + execute_block_range, fetch_block_range_data, fetch_transaction_data, save_executions, + }, + std::path::Path, std::{ops::Div, time::Instant}, }; @@ -176,13 +179,21 @@ fn main() { { let _benchmark_span = info_span!("benchmarking block range").entered(); - let before_execution = Instant::now(); + let mut executions = Vec::new(); + + info!("executing block range"); + let before_execution = Instant::now(); for _ in 0..number_of_runs { - execute_block_range(&mut block_range_data); + executions.push(execute_block_range(&mut block_range_data)); } - let execution_time = before_execution.elapsed(); + + info!("saving execution info"); + let execution = executions.into_iter().flatten().collect::>(); + save_executions(Path::new("executions.json"), execution) + .expect("failed to save execution info"); + let total_run_time = execution_time.as_secs_f64(); let average_run_time = total_run_time.div(number_of_runs as f64); info!( From b5ede278a71cad95c35bd283f01793e0e830913c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Wed, 4 Dec 2024 18:02:59 -0300 Subject: [PATCH 02/26] Add same proof of concept to BenchTx --- replay/src/main.rs | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/replay/src/main.rs b/replay/src/main.rs index 1f68ae0..5d28767 100644 --- a/replay/src/main.rs +++ b/replay/src/main.rs @@ -244,13 +244,21 @@ fn main() { { let _benchmark_span = info_span!("benchmarking transaction").entered(); - let before_execution = Instant::now(); + let mut executions = Vec::new(); + + info!("executing block range"); + let before_execution = Instant::now(); for _ in 0..number_of_runs { - execute_block_range(&mut block_range_data); + executions.push(execute_block_range(&mut block_range_data)); } - let execution_time = before_execution.elapsed(); + + info!("saving execution info"); + let execution = executions.into_iter().flatten().collect::>(); + save_executions(Path::new("executions.json"), execution) + .expect("failed to save execution info"); + let total_run_time = 
execution_time.as_secs_f64(); let average_run_time = total_run_time.div(number_of_runs as f64); info!( From d12ed7ba78bc93e24cc3dadc5d1b599e019b2cdb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Wed, 4 Dec 2024 18:10:28 -0300 Subject: [PATCH 03/26] Implement save_executions in benchmark feature --- replay/Cargo.toml | 2 +- replay/src/benchmark.rs | 66 ++++++++++++++++++++++++++++++++++++++--- 2 files changed, 63 insertions(+), 5 deletions(-) diff --git a/replay/Cargo.toml b/replay/Cargo.toml index cb98eb9..85bc407 100644 --- a/replay/Cargo.toml +++ b/replay/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [features] -benchmark = [] +benchmark = ["dep:serde", "dep:serde_json", "dep:serde_with"] # The only_cairo_vm feature is designed to avoid executing transitions with cairo_native and instead use cairo_vm exclusively only_cairo_vm = ["rpc-state-reader/only_casm"] structured_logging = [] diff --git a/replay/src/benchmark.rs b/replay/src/benchmark.rs index af3362c..b6702b3 100644 --- a/replay/src/benchmark.rs +++ b/replay/src/benchmark.rs @@ -1,8 +1,10 @@ -use std::{error::Error, io, path::Path}; +use std::{error::Error, fs::File, path::Path, time::Duration}; use blockifier::{ context::BlockContext, - execution::contract_class::RunnableCompiledClass, + execution::{ + call_info::CallInfo, contract_class::RunnableCompiledClass, entry_point::CallEntryPoint, + }, state::{cached_state::CachedState, state_api::StateReader}, transaction::objects::TransactionExecutionInfo, }; @@ -11,7 +13,10 @@ use rpc_state_reader::{ objects::TransactionWithHash, reader::{RpcChain, RpcStateReader}, }; -use starknet_api::{block::BlockNumber, hash::StarkHash, transaction::TransactionHash}; +use serde::Serialize; +use starknet_api::{ + block::BlockNumber, core::ClassHash, hash::StarkHash, transaction::TransactionHash, +}; pub type BlockCachedData = ( CachedState>, @@ -90,13 +95,66 @@ pub fn execute_block_range( executions } +#[derive(Serialize)] +struct ClassExecutionInfo { + class_hash: ClassHash, + call: CallEntryPoint, + time: Duration, +} + pub fn save_executions( - file: &Path, + path: &Path, executions: Vec, ) -> Result<(), Box> { + let classes = executions + .into_iter() + .flat_map(|execution| { + let mut classes = Vec::new(); + + if let Some(call) = execution.validate_call_info { + classes.append(&mut xxx(call)); + } + if let Some(call) = execution.execute_call_info { + classes.append(&mut xxx(call)); + } + if let Some(call) = execution.fee_transfer_call_info { + classes.append(&mut xxx(call)); + } + classes + }) + .collect::>(); + + let file = File::create(path)?; + serde_json::to_writer_pretty(file, &classes)?; + Ok(()) } +fn xxx(call: CallInfo) -> Vec { + // get from storage is not available + let class_hash = call.call.class_hash.unwrap_or_default(); + + let mut time = call.time; + let mut classes = call + .inner_calls + .into_iter() + .flat_map(|call| { + time -= call.time; + xxx(call) + }) + .collect::>(); + + let top_class = ClassExecutionInfo { + class_hash, + call: call.call, + time, + }; + + classes.push(top_class); + + return classes; +} + pub fn fetch_transaction_data(tx: &str, block: BlockNumber, chain: RpcChain) -> BlockCachedData { let reader = RpcStateReader::new(chain, block); From d9878078dd4f64c118944fa910afe9275f584562 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Wed, 4 Dec 2024 18:24:00 -0300 Subject: [PATCH 04/26] Rename method --- replay/src/benchmark.rs | 10 +++++----- 1 file 
changed, 5 insertions(+), 5 deletions(-) diff --git a/replay/src/benchmark.rs b/replay/src/benchmark.rs index b6702b3..0b8ddae 100644 --- a/replay/src/benchmark.rs +++ b/replay/src/benchmark.rs @@ -112,13 +112,13 @@ pub fn save_executions( let mut classes = Vec::new(); if let Some(call) = execution.validate_call_info { - classes.append(&mut xxx(call)); + classes.append(&mut get_class_executions(call)); } if let Some(call) = execution.execute_call_info { - classes.append(&mut xxx(call)); + classes.append(&mut get_class_executions(call)); } if let Some(call) = execution.fee_transfer_call_info { - classes.append(&mut xxx(call)); + classes.append(&mut get_class_executions(call)); } classes }) @@ -130,7 +130,7 @@ pub fn save_executions( Ok(()) } -fn xxx(call: CallInfo) -> Vec { +fn get_class_executions(call: CallInfo) -> Vec { // get from storage is not available let class_hash = call.call.class_hash.unwrap_or_default(); @@ -140,7 +140,7 @@ fn xxx(call: CallInfo) -> Vec { .into_iter() .flat_map(|call| { time -= call.time; - xxx(call) + get_class_executions(call) }) .collect::>(); From 959e86c1a1d1dd832cb27af4d634be0630469f36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Wed, 4 Dec 2024 18:26:51 -0300 Subject: [PATCH 05/26] Document unwrap --- replay/src/benchmark.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/replay/src/benchmark.rs b/replay/src/benchmark.rs index 0b8ddae..5f37662 100644 --- a/replay/src/benchmark.rs +++ b/replay/src/benchmark.rs @@ -131,8 +131,8 @@ pub fn save_executions( } fn get_class_executions(call: CallInfo) -> Vec { - // get from storage is not available - let class_hash = call.call.class_hash.unwrap_or_default(); + // class hash can initially be None, but it is always added before execution + let class_hash = call.call.class_hash.unwrap(); let mut time = call.time; let mut classes = call From 06d82dae4a0764b0e815ff4f9bd2cd97bf356474 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Wed, 4 Dec 2024 18:41:01 -0300 Subject: [PATCH 06/26] Add argument for output path --- replay/src/main.rs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/replay/src/main.rs b/replay/src/main.rs index 5d28767..13e9ad4 100644 --- a/replay/src/main.rs +++ b/replay/src/main.rs @@ -20,7 +20,7 @@ use { crate::benchmark::{ execute_block_range, fetch_block_range_data, fetch_transaction_data, save_executions, }, - std::path::Path, + std::path::PathBuf, std::{ops::Div, time::Instant}, }; @@ -71,6 +71,8 @@ Caches all rpc data before the benchmark runs to provide accurate results" block_end: u64, chain: String, number_of_runs: usize, + #[arg(short, long, default_value=PathBuf::from("data").into_os_string())] + output: PathBuf, }, #[cfg(feature = "benchmark")] #[clap(about = "Measures the time it takes to run a single transaction. 
@@ -81,6 +83,8 @@ Caches all rpc data before the benchmark runs to provide accurate results" chain: String, block: u64, number_of_runs: usize, + #[arg(short, long, default_value=PathBuf::from("data").into_os_string())] + output: PathBuf, }, } @@ -153,6 +157,7 @@ fn main() { block_end, chain, number_of_runs, + output, } => { let block_start = BlockNumber(block_start); let block_end = BlockNumber(block_end); @@ -191,8 +196,7 @@ fn main() { info!("saving execution info"); let execution = executions.into_iter().flatten().collect::>(); - save_executions(Path::new("executions.json"), execution) - .expect("failed to save execution info"); + save_executions(&output, execution).expect("failed to save execution info"); let total_run_time = execution_time.as_secs_f64(); let average_run_time = total_run_time.div(number_of_runs as f64); @@ -212,6 +216,7 @@ fn main() { block, chain, number_of_runs, + output, } => { let chain = parse_network(&chain); let block = BlockNumber(block); @@ -256,8 +261,7 @@ fn main() { info!("saving execution info"); let execution = executions.into_iter().flatten().collect::>(); - save_executions(Path::new("executions.json"), execution) - .expect("failed to save execution info"); + save_executions(&output, execution).expect("failed to save execution info"); let total_run_time = execution_time.as_secs_f64(); let average_run_time = total_run_time.div(number_of_runs as f64); From 41d9c507ce3e44a899fa5127fc4225941976e6f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Wed, 4 Dec 2024 18:55:19 -0300 Subject: [PATCH 07/26] Disable charge fee --- rpc-state-reader/src/execution.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rpc-state-reader/src/execution.rs b/rpc-state-reader/src/execution.rs index 85f504f..fd2e464 100644 --- a/rpc-state-reader/src/execution.rs +++ b/rpc-state-reader/src/execution.rs @@ -268,7 +268,7 @@ pub fn execute_tx_with_blockifier( _ => unimplemented!(), }; - account_transaction.execute(state, &context, true, true) + account_transaction.execute(state, &context, false, true) } fn parse_to_rpc_chain(network: &str) -> RpcChain { From 0eafe1d97a0e1b4e52c59d926fc0b7cf0f394cc8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 10:49:05 -0300 Subject: [PATCH 08/26] Ignore pyc --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 9b5d036..5247862 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ debug/ target/ +**.pyc .env .envrc From 6ebdfb2026de97678f28cd77534c8076e34f8554 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 10:49:07 -0300 Subject: [PATCH 09/26] Add utils --- plotting/utils.py | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 plotting/utils.py diff --git a/plotting/utils.py b/plotting/utils.py new file mode 100644 index 0000000..f62ccdb --- /dev/null +++ b/plotting/utils.py @@ -0,0 +1,2 @@ +def format_hash(class_hash): + return f"{class_hash[:6]}..." 
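For context on the `plotting/utils.py` helper added in the patch above: the plotting scripts later in this series pass `format_hash` as a seaborn axis formatter, so full class hashes are shortened to their first six characters plus an ellipsis. A minimal, hypothetical usage sketch (the hash value below is made up, not taken from any real class):

```python
# Hypothetical usage of plotting/utils.py; the class hash is illustrative only.
from utils import format_hash

class_hash = "07f3c1a9b2e4d5"  # made-up example value
print(format_hash(class_hash))  # prints "07f3c1..."
```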
From 1f94c1cf649df9f7192d175b7213c5414b3d2c4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 10:58:07 -0300 Subject: [PATCH 10/26] Save benchmark data output --- scripts/benchmark_tx.sh | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/scripts/benchmark_tx.sh b/scripts/benchmark_tx.sh index 1ba22c1..72d62ae 100755 --- a/scripts/benchmark_tx.sh +++ b/scripts/benchmark_tx.sh @@ -20,20 +20,24 @@ NET=$2 BLOCK=$3 LAPS=$4 -output="$TX-$NET.jsonl" -native_output="native-$output" -vm_output="vm-$output" +log_output="logs-$TX-$NET.jsonl" +native_log_output="native-$log_output" +vm_log_output="vm-$log_output" + +data_output="data-$TX-$NET.json" +native_data_output="native-$data_output" +vm_data_output="vm-$data_output" echo "Executing with Native" -cargo run --release --features benchmark,structured_logging bench-tx "$TX" "$NET" "$BLOCK" "$LAPS" > "$native_output" +cargo run --release --features benchmark,structured_logging bench-tx "$TX" "$NET" "$BLOCK" "$LAPS" -o "$native_data_output" > "$native_log_output" -native_time=$(tail -n1 "$native_output" | jq .fields.average_run_time) +native_time=$(tail -n1 "$native_log_output" | jq .fields.average_run_time) echo "Average Native time: $native_time" echo "Executing with VM" -cargo run --release --features benchmark,structured_logging,only_cairo_vm bench-tx "$TX" "$NET" "$BLOCK" "$LAPS" > "$vm_output" +cargo run --release --features benchmark,structured_logging,only_cairo_vm bench-tx "$TX" "$NET" "$BLOCK" "$LAPS" -o "$vm_data_output" > "$vm_log_output" -vm_time=$(tail -n1 "$vm_output" | jq .fields.average_run_time) +vm_time=$(tail -n1 "$vm_log_output" | jq .fields.average_run_time) echo "Average VM time: $vm_time" speedup=$(bc -l <<< "$vm_time/$native_time") From 74b46ed0711f5b3c48ddbb0e768607b3e9b6915d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 13:10:05 -0300 Subject: [PATCH 11/26] Adapt plotter --- plotting/plot_execution_time.py | 123 +++++++++++++++++++++++--------- 1 file changed, 90 insertions(+), 33 deletions(-) diff --git a/plotting/plot_execution_time.py b/plotting/plot_execution_time.py index 14cd90f..8022a62 100644 --- a/plotting/plot_execution_time.py +++ b/plotting/plot_execution_time.py @@ -1,55 +1,112 @@ from argparse import ArgumentParser -argument_parser = ArgumentParser('Stress Test Plotter') -argument_parser.add_argument("native_logs_path") -argument_parser.add_argument("vm_logs_path") -arguments = argument_parser.parse_args() - import matplotlib.pyplot as plt import pandas as pd import seaborn as sns +from utils import format_hash + +parser = ArgumentParser("Stress Test Plotter") +parser.add_argument("native_data") +parser.add_argument("vm_data") +parser.add_argument("-s", "--speedup", action="store_true") +args = parser.parse_args() + -datasetNative = pd.read_json(arguments.native_logs_path, lines=True, typ="series") -datasetVM = pd.read_json(arguments.vm_logs_path, lines=True, typ="series") +def load_dataset(path, f): + return pd.read_json(path).apply(f, axis=1).dropna().apply(pd.Series) -def canonicalize_execution_time_by_contract_class(event): - # skip caching logs - if find_span(event, "caching block range") != None: - return None - # keep contract execution finished logs - if "contract execution finished" not in event["fields"]["message"]: - return None +def process_row(row): + class_hash = row.class_hash + time = row.time["nanos"] + row.time["secs"] * 10e9 return { - 
"class hash": event["span"]["class_hash"], - "time": float(event["fields"]["time"]), + "class_hash": class_hash, + "time": time, } -def find_span(event, name): - for span in event["spans"]: - if name in span["name"]: - return span - return None -def format_hash(class_hash): - return f"0x{class_hash[:6]}..." +dataNative = load_dataset(args.native_data, process_row) +dataVM = load_dataset(args.vm_data, process_row) + +# calculate mean by class hash +dataNative = dataNative.groupby("class_hash").agg( + total_time=("time", "sum"), + mean_time=("time", "mean"), +) +dataVM = dataVM.groupby("class_hash").agg( + total_time=("time", "sum"), + mean_time=("time", "mean"), +) +data = dataNative.join(dataVM, lsuffix="_native", rsuffix="_vm") + +# calculate speedup +data["speedup"] = data["total_time_vm"] / data["total_time_native"] + +# calculate total speedup +total_native = data["total_time_native"].sum() / 10e9 +print(f"Total Native: {total_native} seconds") +total_vm = data["total_time_vm"].sum() / 10e9 +print(f"Total VM: {total_vm} seconds") +print("Total Speedup:", total_vm / total_native) -datasetNative = datasetNative.apply(canonicalize_execution_time_by_contract_class).dropna().apply(pd.Series) -datasetVM = datasetVM.apply(canonicalize_execution_time_by_contract_class).dropna().apply(pd.Series) +print(data) -datasetNative = datasetNative.groupby("class hash").mean() -datasetVM = datasetVM.groupby("class hash").mean() +# ========== +# PLOTTING +# ========== -figure, ax = plt.subplots() +figure, axes = plt.subplots(1, 2) -sns.set_color_codes("bright") +ax = axes[0] -sns.barplot(ax=ax, y="class hash", x="time", data=datasetVM, formatter=format_hash, label="VM Execution Time", color="r", alpha = 0.75) # type: ignore -sns.barplot(ax=ax, y="class hash", x="time", data=datasetNative, formatter=format_hash, label="Native Execution Time", color="b", alpha = 0.75) # type: ignore +sns.barplot( + ax=ax, + y="class_hash", + x="total_time_vm", + data=data, + formatter=format_hash, + label="VM Execution Time", + color="r", + alpha=0.75, +) # type: ignore +sns.barplot( + ax=ax, + y="class_hash", + x="total_time_native", + data=data, + formatter=format_hash, + label="Native Execution Time", + color="b", + alpha=0.75, +) # type: ignore -ax.set_xlabel("Mean Time (ms)") +ax.set_xlabel("Total Time (ns)") ax.set_ylabel("Class Hash") -ax.set_title("Native vs. 
VM by Contract Class") +ax.set_title("Total time by Contract Class") +ax.set_xscale("log", base=2) + +ax = axes[1] + +sns.barplot( + ax=ax, + y="class_hash", + x="speedup", + data=data, + formatter=format_hash, + label="Execution Speedup", + color="b", + alpha=0.75, +) # type: ignore + +ax.set_xlabel("Speedup") +ax.set_ylabel("Class Hash") +ax.set_title("Speedup by Contract Class") + +if args.speedup: + fig, ax = plt.subplots() + sns.violinplot(ax=ax, x="speedup", data=data, cut=0) + ax.set_xlabel("Speedup") + ax.set_title("Speedup Distribution") plt.show() From 6925fef47d43581f5eb21fa4f81b6ac0e14135e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 14:42:53 -0300 Subject: [PATCH 12/26] Concat dataframes at the start --- plotting/plot_execution_time.py | 44 ++++++++++++++++++++------------- 1 file changed, 27 insertions(+), 17 deletions(-) diff --git a/plotting/plot_execution_time.py b/plotting/plot_execution_time.py index 8022a62..00a0cec 100644 --- a/plotting/plot_execution_time.py +++ b/plotting/plot_execution_time.py @@ -27,34 +27,39 @@ def process_row(row): dataNative = load_dataset(args.native_data, process_row) +dataNative["executor"] = "native" dataVM = load_dataset(args.vm_data, process_row) +dataVM["executor"] = "vm" +data = pd.concat([dataNative, dataVM]) # calculate mean by class hash -dataNative = dataNative.groupby("class_hash").agg( - total_time=("time", "sum"), - mean_time=("time", "mean"), +data = ( + data.groupby(["executor", "class_hash"]) + .agg( + total_time=("time", "sum"), + mean_time=("time", "mean"), + ) + .unstack("executor") ) -dataVM = dataVM.groupby("class_hash").agg( - total_time=("time", "sum"), - mean_time=("time", "mean"), -) -data = dataNative.join(dataVM, lsuffix="_native", rsuffix="_vm") +data.columns = data.columns.map("_".join) # calculate speedup data["speedup"] = data["total_time_vm"] / data["total_time_native"] -# calculate total speedup total_native = data["total_time_native"].sum() / 10e9 -print(f"Total Native: {total_native} seconds") total_vm = data["total_time_vm"].sum() / 10e9 +print(f"Total Native: {total_native} seconds") print(f"Total VM: {total_vm} seconds") print("Total Speedup:", total_vm / total_native) +# sort by decreasing time +data.sort_values(["total_time_vm"], ascending=[False], inplace=True) # type: ignore + print(data) -# ========== -# PLOTTING -# ========== +# ====================== +# PLOTTING +# ====================== figure, axes = plt.subplots(1, 2) @@ -64,7 +69,7 @@ def process_row(row): ax=ax, y="class_hash", x="total_time_vm", - data=data, + data=data, # type: ignore formatter=format_hash, label="VM Execution Time", color="r", @@ -74,7 +79,7 @@ def process_row(row): ax=ax, y="class_hash", x="total_time_native", - data=data, + data=data, # type: ignore formatter=format_hash, label="Native Execution Time", color="b", @@ -92,7 +97,7 @@ def process_row(row): ax=ax, y="class_hash", x="speedup", - data=data, + data=data, # type: ignore formatter=format_hash, label="Execution Speedup", color="b", @@ -105,7 +110,12 @@ def process_row(row): if args.speedup: fig, ax = plt.subplots() - sns.violinplot(ax=ax, x="speedup", data=data, cut=0) + sns.violinplot( + ax=ax, + x="speedup", + data=data, # type: ignore + cut=0, + ) ax.set_xlabel("Speedup") ax.set_title("Speedup Distribution") From 068aca7eb60a12f81ebccbf30fc2becf00e0cd76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 14:46:24 -0300 Subject: [PATCH 13/26] 
Only save selector --- replay/src/benchmark.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/replay/src/benchmark.rs b/replay/src/benchmark.rs index 5f37662..d628676 100644 --- a/replay/src/benchmark.rs +++ b/replay/src/benchmark.rs @@ -15,7 +15,10 @@ use rpc_state_reader::{ }; use serde::Serialize; use starknet_api::{ - block::BlockNumber, core::ClassHash, hash::StarkHash, transaction::TransactionHash, + block::BlockNumber, + core::{ClassHash, EntryPointSelector}, + hash::StarkHash, + transaction::TransactionHash, }; pub type BlockCachedData = ( @@ -98,7 +101,7 @@ pub fn execute_block_range( #[derive(Serialize)] struct ClassExecutionInfo { class_hash: ClassHash, - call: CallEntryPoint, + selector: EntryPointSelector, time: Duration, } @@ -146,7 +149,7 @@ fn get_class_executions(call: CallInfo) -> Vec { let top_class = ClassExecutionInfo { class_hash, - call: call.call, + selector: call.call.entry_point_selector, time, }; From c46382d2224f80e8b20039a18649b9de8dbbd028 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 15:26:57 -0300 Subject: [PATCH 14/26] Point to sequencer --- Cargo.lock | 36 ++++++++++++++++++------------------ Cargo.toml | 8 ++++---- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3ba4c3a..c58c7cd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -655,7 +655,7 @@ dependencies = [ [[package]] name = "blockifier" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "anyhow", "ark-ec", @@ -699,7 +699,7 @@ dependencies = [ [[package]] name = "blockifier_reexecution" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "assert_matches", "blockifier", @@ -3770,7 +3770,7 @@ checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" [[package]] name = "infra_utils" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "tokio", ] @@ -4539,7 +4539,7 @@ dependencies = [ [[package]] name = "mempool_test_utils" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "assert_matches", "blockifier", @@ -5003,7 +5003,7 @@ dependencies = [ [[package]] name = "papyrus_common" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = 
"git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "base64 0.13.1", "cairo-lang-starknet-classes", @@ -5022,7 +5022,7 @@ dependencies = [ [[package]] name = "papyrus_config" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "clap", "infra_utils", @@ -5037,7 +5037,7 @@ dependencies = [ [[package]] name = "papyrus_execution" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "anyhow", "blockifier", @@ -5060,7 +5060,7 @@ dependencies = [ [[package]] name = "papyrus_network_types" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "libp2p", "serde", @@ -5069,7 +5069,7 @@ dependencies = [ [[package]] name = "papyrus_proc_macros" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "quote", "syn 2.0.90", @@ -5079,7 +5079,7 @@ dependencies = [ [[package]] name = "papyrus_rpc" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "anyhow", "async-trait", @@ -5113,7 +5113,7 @@ dependencies = [ [[package]] name = "papyrus_storage" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "byteorder", "cairo-lang-casm", @@ -6950,7 +6950,7 @@ dependencies = [ [[package]] name = "starknet_api" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "bitvec", "cairo-lang-runner", @@ -6976,7 +6976,7 @@ dependencies = [ [[package]] name = "starknet_client" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = 
"git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "async-trait", "cairo-lang-starknet-classes", @@ -7003,7 +7003,7 @@ dependencies = [ [[package]] name = "starknet_gateway" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "async-trait", "axum", @@ -7031,7 +7031,7 @@ dependencies = [ [[package]] name = "starknet_gateway_types" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "async-trait", "axum", @@ -7050,7 +7050,7 @@ dependencies = [ [[package]] name = "starknet_mempool_types" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "async-trait", "papyrus_network_types", @@ -7064,7 +7064,7 @@ dependencies = [ [[package]] name = "starknet_sequencer_infra" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "async-trait", "bincode 1.3.3", @@ -7082,7 +7082,7 @@ dependencies = [ [[package]] name = "starknet_sierra_compile" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=bfc5b6b5475d359b8fc910516e026d972be5d02f#bfc5b6b5475d359b8fc910516e026d972be5d02f" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" dependencies = [ "cairo-lang-sierra", "cairo-lang-starknet-classes", diff --git a/Cargo.toml b/Cargo.toml index a6f7403..34f3c25 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,7 +14,7 @@ serde_with = "3.11.0" serde = "1.0.197" cairo-native = "0.2.4" # Sequencer Dependencies -starknet_api = { git = "https://github.com/lambdaclass/sequencer.git", rev = "bfc5b6b5475d359b8fc910516e026d972be5d02f" } # replay -blockifier = { git = "https://github.com/lambdaclass/sequencer.git", rev = "bfc5b6b5475d359b8fc910516e026d972be5d02f", features = ["cairo_native"] } # replay -starknet_gateway = { git = "https://github.com/lambdaclass/sequencer.git", rev = "bfc5b6b5475d359b8fc910516e026d972be5d02f" } # replay -blockifier_reexecution = { git = "https://github.com/lambdaclass/sequencer.git", rev = "bfc5b6b5475d359b8fc910516e026d972be5d02f" } # replay +starknet_api = { git = "https://github.com/lambdaclass/sequencer.git", rev = "ba98788435a204d39296496966499cd89ec9a146" } # replay +blockifier = { git = "https://github.com/lambdaclass/sequencer.git", rev = "ba98788435a204d39296496966499cd89ec9a146", features = ["cairo_native"] } # replay +starknet_gateway = { git = 
"https://github.com/lambdaclass/sequencer.git", rev = "ba98788435a204d39296496966499cd89ec9a146" } # replay +blockifier_reexecution = { git = "https://github.com/lambdaclass/sequencer.git", rev = "ba98788435a204d39296496966499cd89ec9a146" } # replay From e484f45cd4349b241cda481ba70805a59b830e23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 15:33:35 -0300 Subject: [PATCH 15/26] Update benchmark block --- scripts/benchmark_block.sh | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/scripts/benchmark_block.sh b/scripts/benchmark_block.sh index c9fc50f..5b9474b 100755 --- a/scripts/benchmark_block.sh +++ b/scripts/benchmark_block.sh @@ -21,20 +21,24 @@ END=$2 NET=$3 LAPS=$4 -output="block-$START-$END-$NET.jsonl" -native_output="native-$output" -vm_output="vm-$output" +log_output="logs-$START-$END-$NET.jsonl" +native_log_output="native-$log_output" +vm_log_output="vm-$log_output" + +data_output="data-$START-$END-$NET.jsonl" +native_data_output="native-$data_output" +vm_data_output="vm-$data_output" echo "Executing with Native" -cargo run --release --features benchmark,structured_logging bench-block-range "$START" "$END" "$NET" "$LAPS" > "$native_output" +cargo run --release --features benchmark,structured_logging bench-block-range "$START" "$END" "$NET" "$LAPS" -o "$native_data_output" > "$native_log_output" -native_time=$(tail -n1 "$native_output" | jq .fields.average_run_time) +native_time=$(tail -n1 "$native_log_output" | jq .fields.average_run_time) echo "Average Native time: $native_time" echo "Executing with VM" -cargo run --release --features benchmark,structured_logging,only_cairo_vm bench-block-range "$START" "$END" "$NET" "$LAPS" > "$vm_output" +cargo run --release --features benchmark,structured_logging,only_cairo_vm bench-block-range "$START" "$END" "$NET" "$LAPS" -o "$vm_data_output" > "$vm_log_output" -vm_time=$(tail -n1 "$vm_output" | jq .fields.average_run_time) +vm_time=$(tail -n1 "$vm_log_output" | jq .fields.average_run_time) echo "Average VM time: $vm_time" speedup=$(bc -l <<< "$vm_time/$native_time") From 9aba89805b4ca433d4ee3de90fbf8ae837499b48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 15:55:24 -0300 Subject: [PATCH 16/26] Use .json instead of .jsonl --- scripts/benchmark_block.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/benchmark_block.sh b/scripts/benchmark_block.sh index 5b9474b..712e8a8 100755 --- a/scripts/benchmark_block.sh +++ b/scripts/benchmark_block.sh @@ -25,7 +25,7 @@ log_output="logs-$START-$END-$NET.jsonl" native_log_output="native-$log_output" vm_log_output="vm-$log_output" -data_output="data-$START-$END-$NET.jsonl" +data_output="data-$START-$END-$NET.json" native_data_output="native-$data_output" vm_data_output="vm-$data_output" From a30047624451894bfd8c824197e006e7a85802eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 16:16:28 -0300 Subject: [PATCH 17/26] Fail if missmatch between native and vm --- plotting/plot_execution_time.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plotting/plot_execution_time.py b/plotting/plot_execution_time.py index 00a0cec..bef5bf4 100644 --- a/plotting/plot_execution_time.py +++ b/plotting/plot_execution_time.py @@ -38,11 +38,15 @@ def process_row(row): .agg( total_time=("time", "sum"), mean_time=("time", "mean"), + samples=("time", "size"), ) 
.unstack("executor") ) data.columns = data.columns.map("_".join) +if (data["samples_native"] != data["samples_vm"]).any(): + raise Exception("Native and VM should have the same number of samples") + # calculate speedup data["speedup"] = data["total_time_vm"] / data["total_time_native"] From 8eb51e9b345412aef9ef28b1e57863758f28564c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 18:37:31 -0300 Subject: [PATCH 18/26] Fix clippy --- replay/src/benchmark.rs | 4 +--- replay/src/main.rs | 9 ++++++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/replay/src/benchmark.rs b/replay/src/benchmark.rs index d628676..7904ac9 100644 --- a/replay/src/benchmark.rs +++ b/replay/src/benchmark.rs @@ -2,9 +2,7 @@ use std::{error::Error, fs::File, path::Path, time::Duration}; use blockifier::{ context::BlockContext, - execution::{ - call_info::CallInfo, contract_class::RunnableCompiledClass, entry_point::CallEntryPoint, - }, + execution::{call_info::CallInfo, contract_class::RunnableCompiledClass}, state::{cached_state::CachedState, state_api::StateReader}, transaction::objects::TransactionExecutionInfo, }; diff --git a/replay/src/main.rs b/replay/src/main.rs index 13e9ad4..80b7c91 100644 --- a/replay/src/main.rs +++ b/replay/src/main.rs @@ -1,6 +1,3 @@ -use std::thread; -use std::time::Duration; - use blockifier::state::cached_state::CachedState; use blockifier::state::errors::StateError; use blockifier::transaction::objects::{RevertError, TransactionExecutionInfo}; @@ -21,6 +18,8 @@ use { execute_block_range, fetch_block_range_data, fetch_transaction_data, save_executions, }, std::path::PathBuf, + std::thread, + std::time::Duration, std::{ops::Div, time::Instant}, }; @@ -182,6 +181,10 @@ fn main() { block_range_data }; + // We pause the main thread to differentiate + // caching from benchmarking from within a profiler + thread::sleep(Duration::from_secs(1)); + { let _benchmark_span = info_span!("benchmarking block range").entered(); From 1ac30fc5876fdf5df9d95cb1035c7a61ebe1555d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 5 Dec 2024 18:57:01 -0300 Subject: [PATCH 19/26] Fix clippy --- replay/src/benchmark.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/replay/src/benchmark.rs b/replay/src/benchmark.rs index 7904ac9..4986e5e 100644 --- a/replay/src/benchmark.rs +++ b/replay/src/benchmark.rs @@ -153,7 +153,7 @@ fn get_class_executions(call: CallInfo) -> Vec { classes.push(top_class); - return classes; + classes } pub fn fetch_transaction_data(tx: &str, block: BlockNumber, chain: RpcChain) -> BlockCachedData { From a2db2c4a7e3256549a3fe9e93afc2558dd7d2094 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Fri, 6 Dec 2024 16:05:30 -0300 Subject: [PATCH 20/26] Update sequencer rev --- Cargo.lock | 52 ++++++++++++++++++++++++++-------------------------- Cargo.toml | 8 ++++---- 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c58c7cd..724fa19 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -655,7 +655,7 @@ dependencies = [ [[package]] name = "blockifier" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "anyhow", 
"ark-ec", @@ -699,7 +699,7 @@ dependencies = [ [[package]] name = "blockifier_reexecution" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "assert_matches", "blockifier", @@ -1574,9 +1574,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.22" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69371e34337c4c984bbe322360c2547210bf632eb2814bbe78a6e87a2935bd2b" +checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" dependencies = [ "clap_builder", "clap_derive", @@ -1584,9 +1584,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.22" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e24c1b4099818523236a8ca881d2b45db98dadfb4625cf6608c12069fcbbde1" +checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" dependencies = [ "anstream", "anstyle", @@ -1608,9 +1608,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "coins-bip32" @@ -3770,7 +3770,7 @@ checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" [[package]] name = "infra_utils" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "tokio", ] @@ -4539,7 +4539,7 @@ dependencies = [ [[package]] name = "mempool_test_utils" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "assert_matches", "blockifier", @@ -5003,7 +5003,7 @@ dependencies = [ [[package]] name = "papyrus_common" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "base64 0.13.1", "cairo-lang-starknet-classes", @@ -5022,7 +5022,7 @@ dependencies = [ [[package]] name = "papyrus_config" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "clap", "infra_utils", @@ -5037,7 +5037,7 @@ dependencies = [ [[package]] name = "papyrus_execution" version = "0.0.0" -source = 
"git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "anyhow", "blockifier", @@ -5060,7 +5060,7 @@ dependencies = [ [[package]] name = "papyrus_network_types" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "libp2p", "serde", @@ -5069,7 +5069,7 @@ dependencies = [ [[package]] name = "papyrus_proc_macros" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "quote", "syn 2.0.90", @@ -5079,7 +5079,7 @@ dependencies = [ [[package]] name = "papyrus_rpc" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "anyhow", "async-trait", @@ -5113,7 +5113,7 @@ dependencies = [ [[package]] name = "papyrus_storage" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "byteorder", "cairo-lang-casm", @@ -6950,7 +6950,7 @@ dependencies = [ [[package]] name = "starknet_api" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "bitvec", "cairo-lang-runner", @@ -6976,7 +6976,7 @@ dependencies = [ [[package]] name = "starknet_client" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "async-trait", "cairo-lang-starknet-classes", @@ -7003,7 +7003,7 @@ dependencies = [ [[package]] name = "starknet_gateway" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "async-trait", "axum", @@ -7031,7 +7031,7 @@ dependencies = [ [[package]] name = "starknet_gateway_types" version = "0.0.0" -source = 
"git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "async-trait", "axum", @@ -7050,7 +7050,7 @@ dependencies = [ [[package]] name = "starknet_mempool_types" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "async-trait", "papyrus_network_types", @@ -7064,7 +7064,7 @@ dependencies = [ [[package]] name = "starknet_sequencer_infra" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "async-trait", "bincode 1.3.3", @@ -7082,7 +7082,7 @@ dependencies = [ [[package]] name = "starknet_sierra_compile" version = "0.0.0" -source = "git+https://github.com/lambdaclass/sequencer.git?rev=ba98788435a204d39296496966499cd89ec9a146#ba98788435a204d39296496966499cd89ec9a146" +source = "git+https://github.com/lambdaclass/sequencer.git?rev=2ad9ecad71bd71304b80c36992f41568c82313ad#2ad9ecad71bd71304b80c36992f41568c82313ad" dependencies = [ "cairo-lang-sierra", "cairo-lang-starknet-classes", @@ -7523,9 +7523,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", diff --git a/Cargo.toml b/Cargo.toml index 34f3c25..95485b7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,7 +14,7 @@ serde_with = "3.11.0" serde = "1.0.197" cairo-native = "0.2.4" # Sequencer Dependencies -starknet_api = { git = "https://github.com/lambdaclass/sequencer.git", rev = "ba98788435a204d39296496966499cd89ec9a146" } # replay -blockifier = { git = "https://github.com/lambdaclass/sequencer.git", rev = "ba98788435a204d39296496966499cd89ec9a146", features = ["cairo_native"] } # replay -starknet_gateway = { git = "https://github.com/lambdaclass/sequencer.git", rev = "ba98788435a204d39296496966499cd89ec9a146" } # replay -blockifier_reexecution = { git = "https://github.com/lambdaclass/sequencer.git", rev = "ba98788435a204d39296496966499cd89ec9a146" } # replay +starknet_api = { git = "https://github.com/lambdaclass/sequencer.git", rev = "2ad9ecad71bd71304b80c36992f41568c82313ad" } # replay +blockifier = { git = "https://github.com/lambdaclass/sequencer.git", rev = "2ad9ecad71bd71304b80c36992f41568c82313ad", features = ["cairo_native"] } # replay +starknet_gateway = { git = "https://github.com/lambdaclass/sequencer.git", rev = "2ad9ecad71bd71304b80c36992f41568c82313ad" } # replay +blockifier_reexecution = { git = "https://github.com/lambdaclass/sequencer.git", rev = "2ad9ecad71bd71304b80c36992f41568c82313ad" } # replay From 74ee2ab1da714d98bfec46332bd43dd285d0eba7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= 
Date: Fri, 6 Dec 2024 16:20:06 -0300 Subject: [PATCH 21/26] Handle time zero bug --- replay/src/benchmark.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/replay/src/benchmark.rs b/replay/src/benchmark.rs index 4986e5e..4537be1 100644 --- a/replay/src/benchmark.rs +++ b/replay/src/benchmark.rs @@ -135,16 +135,22 @@ fn get_class_executions(call: CallInfo) -> Vec { // class hash can initially be None, but it is always added before execution let class_hash = call.call.class_hash.unwrap(); - let mut time = call.time; + let mut inner_time = Duration::ZERO; + let mut classes = call .inner_calls .into_iter() .flat_map(|call| { - time -= call.time; + inner_time += call.time; get_class_executions(call) }) .collect::>(); + if call.time.is_zero() { + panic!("contract time should never be zero, there is a bug somewhere") + } + let time = call.time - inner_time; + let top_class = ClassExecutionInfo { class_hash, selector: call.call.entry_point_selector, From 91f22c034025b2d81507f8443f89d7a05a63df56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Mon, 9 Dec 2024 15:48:26 -0300 Subject: [PATCH 22/26] Print csv --- plotting/plot_execution_time.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/plotting/plot_execution_time.py b/plotting/plot_execution_time.py index bef5bf4..693ea54 100644 --- a/plotting/plot_execution_time.py +++ b/plotting/plot_execution_time.py @@ -3,6 +3,7 @@ import matplotlib.pyplot as plt import pandas as pd import seaborn as sns +import io from utils import format_hash parser = ArgumentParser("Stress Test Plotter") @@ -11,6 +12,9 @@ parser.add_argument("-s", "--speedup", action="store_true") args = parser.parse_args() +pd.set_option("display.max_columns", None) +pd.set_option("display.max_rows", None) + def load_dataset(path, f): return pd.read_json(path).apply(f, axis=1).dropna().apply(pd.Series) @@ -59,7 +63,11 @@ def process_row(row): # sort by decreasing time data.sort_values(["total_time_vm"], ascending=[False], inplace=True) # type: ignore -print(data) +s = io.StringIO() +data.to_csv(s) +print(s.getvalue()) + +data = data.nlargest(50, "total_time_vm") # type: ignore # ====================== # PLOTTING From 870c2da0874e0bda1a1e230dc7a78c43fb9cfed3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Mon, 9 Dec 2024 18:30:12 -0300 Subject: [PATCH 23/26] Group by selector also --- plotting/plot_execution_time.py | 51 +++++++++++++++++++++++---------- 1 file changed, 36 insertions(+), 15 deletions(-) diff --git a/plotting/plot_execution_time.py b/plotting/plot_execution_time.py index 693ea54..f8fecda 100644 --- a/plotting/plot_execution_time.py +++ b/plotting/plot_execution_time.py @@ -22,10 +22,12 @@ def load_dataset(path, f): def process_row(row): class_hash = row.class_hash + selector = row.selector time = row.time["nanos"] + row.time["secs"] * 10e9 return { "class_hash": class_hash, + "selector": selector, "time": time, } @@ -36,9 +38,11 @@ def process_row(row): dataVM["executor"] = "vm" data = pd.concat([dataNative, dataVM]) +# GROUP BY SELECTOR + # calculate mean by class hash -data = ( - data.groupby(["executor", "class_hash"]) +data_by_selector = ( + data.groupby(["executor", "class_hash", "selector"]) .agg( total_time=("time", "sum"), mean_time=("time", "mean"), @@ -46,28 +50,45 @@ def process_row(row): ) .unstack("executor") ) -data.columns = data.columns.map("_".join) +data_by_selector.columns = 
data_by_selector.columns.map("_".join) -if (data["samples_native"] != data["samples_vm"]).any(): +if (data_by_selector["samples_native"] != data_by_selector["samples_vm"]).any(): raise Exception("Native and VM should have the same number of samples") # calculate speedup -data["speedup"] = data["total_time_vm"] / data["total_time_native"] - -total_native = data["total_time_native"].sum() / 10e9 -total_vm = data["total_time_vm"].sum() / 10e9 +data_by_selector["speedup"] = ( + data_by_selector["total_time_vm"] / data_by_selector["total_time_native"] +) +total_native = data_by_selector["total_time_native"].sum() / 10e9 +total_vm = data_by_selector["total_time_vm"].sum() / 10e9 print(f"Total Native: {total_native} seconds") print(f"Total VM: {total_vm} seconds") print("Total Speedup:", total_vm / total_native) # sort by decreasing time -data.sort_values(["total_time_vm"], ascending=[False], inplace=True) # type: ignore +data_by_selector.sort_values(["total_time_vm"], ascending=[False], inplace=True) # type: ignore s = io.StringIO() -data.to_csv(s) +data_by_selector.to_csv(s) print(s.getvalue()) -data = data.nlargest(50, "total_time_vm") # type: ignore +# GROUP BY CLASS + +data_by_class = ( + data.groupby(["executor", "class_hash"]) + .agg( + total_time=("time", "sum"), + mean_time=("time", "mean"), + samples=("time", "size"), + ) + .unstack("executor") +) +data_by_class.columns = data_by_class.columns.map("_".join) +data_by_class["speedup"] = ( + data_by_class["total_time_vm"] / data_by_class["total_time_native"] +) +data_by_class.sort_values(["total_time_vm"], ascending=[False], inplace=True) # type: ignore +data_by_class = data_by_class.nlargest(50, "total_time_vm") # type: ignore # ====================== # PLOTTING @@ -81,7 +102,7 @@ def process_row(row): ax=ax, y="class_hash", x="total_time_vm", - data=data, # type: ignore + data=data_by_class, # type: ignore formatter=format_hash, label="VM Execution Time", color="r", @@ -91,7 +112,7 @@ def process_row(row): ax=ax, y="class_hash", x="total_time_native", - data=data, # type: ignore + data=data_by_class, # type: ignore formatter=format_hash, label="Native Execution Time", color="b", @@ -109,7 +130,7 @@ def process_row(row): ax=ax, y="class_hash", x="speedup", - data=data, # type: ignore + data=data_by_class, # type: ignore formatter=format_hash, label="Execution Speedup", color="b", @@ -125,7 +146,7 @@ def process_row(row): sns.violinplot( ax=ax, x="speedup", - data=data, # type: ignore + data=data_by_class, # type: ignore cut=0, ) ax.set_xlabel("Speedup") From d3f19011a84e08ec3aa5a4132a2afd48c29437df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Mon, 9 Dec 2024 18:43:32 -0300 Subject: [PATCH 24/26] Update README --- README.md | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 6215aaf..32065b9 100644 --- a/README.md +++ b/README.md @@ -126,17 +126,25 @@ In the `plotting` directory, you can find python scripts to plot relevant inform Make sure to erase the `compiled_programs` directory, then run: ```bash -cargo run --features benchmark,structured_logging bench-block-range 724000 724000 mainnet 1 | tee native-logs -cargo run --features benchmark,structured_logging,only_cairo_vm bench-block-range 724000 724000 mainnet 1 | tee vm-logs +./scripts/benchmark_tx.sh +``` + +This generates four files: +- `{native,vm}-data-$tx-$net.json`: Contains the execution time of each contract call +- `{native,vm}-logs-$tx-$net.json`: Contains the output of 
running the benchmark + +If you want to benchmark a full block, you could run: +```bash +./scripts/benchmark_block.sh ``` Once you have done this, you can use the plotting scripts: +- `python ./plotting/plot_execution_time.py native-data vm-data`: Plots the execution time of Native vs VM, by contract class. - `python ./plotting/plot_compilation_memory.py native-logs`: Size of the compiled native libraries, by contract class. - `python ./plotting/plot_compilation_memory_corr.py native-logs vm-logs`: Size of the compiled native libraries, by the associated Casm contract size. - `python ./plotting/plot_compilation_memory_trend.py native-logs vm-logs`: Size of the compiled native and casm contracts, by the sierra contract size. - `python ./plotting/plot_compilation_time.py native-logs`: Native compilation time, by contract class - `python ./plotting/plot_compilation_time_trend.py native-logs vm-logs`: Native and Casm compilation time, by the sierra contract size. -- `python ./plotting/plot_execution_time.py native-logs vm-logs`: Plots the execution time of Native vs VM, by contract class. - `python ./plotting/plot_compilation_time_finer.py native-logs`: Native compilation time, with fine-grained stage separation, by contract class. From 29d8cfb0175a1920b2d753b76228792974162b59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Wed, 11 Dec 2024 16:05:55 -0300 Subject: [PATCH 25/26] Add profiling feature --- replay/Cargo.toml | 1 + replay/src/main.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/replay/Cargo.toml b/replay/Cargo.toml index 85bc407..35d3ee2 100644 --- a/replay/Cargo.toml +++ b/replay/Cargo.toml @@ -8,6 +8,7 @@ benchmark = ["dep:serde", "dep:serde_json", "dep:serde_with"] # The only_cairo_vm feature is designed to avoid executing transitions with cairo_native and instead use cairo_vm exclusively only_cairo_vm = ["rpc-state-reader/only_casm"] structured_logging = [] +profiling = [] state_dump = ["dep:serde", "dep:serde_json", "dep:serde_with", "dep:starknet-types-core"] [dependencies] diff --git a/replay/src/main.rs b/replay/src/main.rs index 80b7c91..ec8584c 100644 --- a/replay/src/main.rs +++ b/replay/src/main.rs @@ -248,6 +248,7 @@ fn main() { // We pause the main thread to differentiate // caching from benchmarking from within a profiler + #[cfg(profiling)] thread::sleep(Duration::from_secs(1)); { From ab4d981982a2dfdecc3c6fd76edfcc2983d16f8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juli=C3=A1n=20Gonz=C3=A1lez=20Calder=C3=B3n?= Date: Thu, 12 Dec 2024 11:10:09 -0300 Subject: [PATCH 26/26] Fix clippy --- replay/src/main.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/replay/src/main.rs b/replay/src/main.rs index ec8584c..ac69497 100644 --- a/replay/src/main.rs +++ b/replay/src/main.rs @@ -183,6 +183,7 @@ fn main() { // We pause the main thread to differentiate // caching from benchmarking from within a profiler + #[cfg(feature = "profiling")] thread::sleep(Duration::from_secs(1)); { @@ -248,7 +249,7 @@ fn main() { // We pause the main thread to differentiate // caching from benchmarking from within a profiler - #[cfg(profiling)] + #[cfg(feature = "profiling")] thread::sleep(Duration::from_secs(1)); {
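Tying the two halves of this series together: the Rust side serializes a flat list of `ClassExecutionInfo { class_hash, selector, time }` entries, and `process_row` in `plot_execution_time.py` reads `time` as an object with `secs` and `nanos` fields. A hedged sketch of consuming one such data file outside the plotting scripts follows; the file name and all values are illustrative, and the exact JSON layout is assumed from the struct definition and from what `process_row` accesses, not verified against real output.

```python
# Illustrative only: assumes each entry carries class_hash, selector, and a
# {"secs": ..., "nanos": ...} time object, matching what process_row reads.
import json

with open("native-data-example.json") as f:  # hypothetical file name
    entries = json.load(f)

# Each entry is expected to look roughly like:
# {
#     "class_hash": "0x07f3c1...",           # hypothetical value
#     "selector": "0x0362398...",            # hypothetical value
#     "time": {"secs": 0, "nanos": 845231},  # self time, inner calls subtracted
# }
for entry in entries:
    nanos = entry["time"]["secs"] * 1_000_000_000 + entry["time"]["nanos"]
    print(entry["class_hash"], entry["selector"], nanos)
```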