diff --git a/Cargo.lock b/Cargo.lock
index 318b2ae6af..21a5837fc7 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10498,6 +10498,7 @@ dependencies = [
  "itertools 0.12.1",
  "mempool_test_utils",
  "papyrus_common",
+ "papyrus_config",
  "papyrus_consensus",
  "papyrus_execution",
  "papyrus_network",
diff --git a/crates/papyrus_config/src/dumping.rs b/crates/papyrus_config/src/dumping.rs
index 1acddda112..7b13b04641 100644
--- a/crates/papyrus_config/src/dumping.rs
+++ b/crates/papyrus_config/src/dumping.rs
@@ -363,7 +363,7 @@ pub fn ser_pointer_target_required_param(
 /// Updates entries in the map to point to these targets, replacing values of entries that match
 /// the target parameter paths to contain only the name of the target they point to.
 /// Fails if a param is not pointing to a same-named pointer target nor whitelisted.
-pub(crate) fn combine_config_map_and_pointers(
+pub fn combine_config_map_and_pointers(
     mut config_map: BTreeMap<ParamPath, Value>,
     pointers: &ConfigPointers,
     non_pointer_params: &Pointers,
diff --git a/crates/starknet_integration_tests/Cargo.toml b/crates/starknet_integration_tests/Cargo.toml
index 95a07bf215..ffbb1f5e38 100644
--- a/crates/starknet_integration_tests/Cargo.toml
+++ b/crates/starknet_integration_tests/Cargo.toml
@@ -18,6 +18,7 @@ indexmap.workspace = true
 infra_utils.workspace = true
 mempool_test_utils.workspace = true
 papyrus_common.workspace = true
+papyrus_config.workspace = true
 papyrus_consensus.workspace = true
 papyrus_execution.workspace = true
 papyrus_network = { workspace = true, features = ["testing"] }
diff --git a/crates/starknet_integration_tests/src/config_utils.rs b/crates/starknet_integration_tests/src/config_utils.rs
index de792df733..a068c99978 100644
--- a/crates/starknet_integration_tests/src/config_utils.rs
+++ b/crates/starknet_integration_tests/src/config_utils.rs
@@ -3,7 +3,8 @@
 use std::io::Write;
 use std::net::SocketAddr;
 use std::path::PathBuf;
-use serde_json::{json, Value};
+use papyrus_config::dumping::{combine_config_map_and_pointers, SerializeConfig};
+use serde_json::{json, Map, Value};
 use starknet_sequencer_infra::component_definitions::{
     LocalServerConfig,
     RemoteClientConfig,
@@ -16,9 +17,14 @@
 use starknet_sequencer_node::config::component_execution_config::{
     ReactiveComponentExecutionConfig,
     ReactiveComponentExecutionMode,
 };
-use starknet_sequencer_node::config::node_config::SequencerNodeConfig;
+use starknet_sequencer_node::config::node_config::{
+    SequencerNodeConfig,
+    CONFIG_NON_POINTERS_WHITELIST,
+    CONFIG_POINTERS,
+};
 use starknet_sequencer_node::config::test_utils::RequiredParams;
 use tracing::info;
+
 // TODO(Tsabary): Move here all config-related functions from "integration_test_utils.rs".
 const NODE_CONFIG_CHANGES_FILE_PATH: &str = "node_integration_test_config_changes.json";
@@ -47,7 +53,7 @@
     };
 }
 
-/// Creates a config file for the sequencer node for the end to end integration test.
+/// Creates a config file for the sequencer node for an integration test.
 pub(crate) fn dump_config_file_changes(
     config: &SequencerNodeConfig,
     required_params: RequiredParams,
     dir: PathBuf,
 ) -> PathBuf {
     // Dump config changes file for the sequencer node.
     // TODO(Tsabary): auto dump the entirety of RequiredParams fields.
-    let json_data = config_fields_to_json!(
+    let required_params_json = config_fields_to_json!(
         required_params.chain_id,
         required_params.eth_fee_token_address,
         required_params.strk_fee_token_address,
         required_params.validator_id,
-        config.rpc_state_reader_config.url,
-        config.batcher_config.storage.db_config.path_prefix,
-        config.http_server_config.ip,
-        config.http_server_config.port,
-        config.consensus_manager_config.consensus_config.start_height,
-        config.state_sync_config.storage_config.db_config.path_prefix,
-        config.state_sync_config.network_config.tcp_port,
     );
-    let node_config_path = dump_json_data(json_data, NODE_CONFIG_CHANGES_FILE_PATH, dir);
-    assert!(node_config_path.exists(), "File does not exist: {:?}", node_config_path);
+
+    // Create the entire mapping of the config and the pointers, without the required params.
+    let config_as_map = combine_config_map_and_pointers(
+        config.dump(),
+        &CONFIG_POINTERS,
+        &CONFIG_NON_POINTERS_WHITELIST,
+    )
+    .unwrap();
+
+    // Extract only the required fields from the config map.
+    let mut preset = config_to_preset(&config_as_map);
+
+    // Add the required params to the preset.
+    add_required_params_to_preset(&mut preset, &required_params_json);
+
+    // Dump the preset to a file and return its path.
+    let node_config_path = dump_json_data(preset, NODE_CONFIG_CHANGES_FILE_PATH, dir);
+    assert!(node_config_path.exists(), "File does not exist: {:?}", node_config_path);
 
     node_config_path
 }
@@ -157,3 +171,76 @@ pub async fn get_remote_flow_test_config() -> Vec<ComponentConfig> {
         get_non_http_component_config(gateway_socket).await,
     ]
 }
+
+/// Transforms a nested JSON dictionary object into a simplified JSON dictionary object by
+/// extracting specific values from the inner dictionaries.
+///
+/// # Parameters
+/// - `config_map`: A reference to a `serde_json::Value` that must be a JSON dictionary object.
+///   Each key in the object maps to another JSON dictionary object.
+///
+/// # Returns
+/// - A `serde_json::Value` dictionary object where:
+///   - Each key is preserved from the top-level dictionary.
+///   - Each value corresponds to the `"value"` field of the nested JSON dictionary under the
+///     original key.
+///
+/// # Panics
+/// This function panics if the provided `config_map` is not a JSON dictionary object.
+fn config_to_preset(config_map: &Value) -> Value {
+    // Ensure the config_map is a JSON object.
+    if let Value::Object(map) = config_map {
+        let mut result = Map::new();
+
+        for (key, value) in map {
+            if let Value::Object(inner_map) = value {
+                // Extract the value.
+                if let Some(inner_value) = inner_map.get("value") {
+                    // Add it to the result map.
+                    result.insert(key.clone(), inner_value.clone());
+                }
+            }
+        }
+
+        // Return the transformed result as a JSON object.
+        Value::Object(result)
+    } else {
+        panic!("Config map is not a JSON object: {:?}", config_map);
+    }
+}
+
+/// Merges required parameters into an existing preset JSON object.
+///
+/// # Parameters
+/// - `preset`: A mutable reference to a `serde_json::Value` representing the preset. It must be a
+///   JSON dictionary object where additional parameters will be added.
+/// - `required_params`: A reference to a `serde_json::Value` representing the required parameters.
+///   It must also be a JSON dictionary object. Its keys and values will be merged into the
+///   `preset`.
+///
+/// # Behavior
+/// - For each key-value pair in `required_params`, the pair is inserted into `preset`.
+/// - If a key from `required_params` already exists in `preset`, the function panics rather than
+///   overwriting the existing value.
+/// - Both `preset` and `required_params` must be JSON dictionary objects; otherwise, the function
+///   panics.
+///
+/// # Panics
+/// This function panics if either `preset` or `required_params` is not a JSON dictionary object,
+/// or if `preset` already contains a key from `required_params`.
+fn add_required_params_to_preset(preset: &mut Value, required_params: &Value) {
+    if let (Value::Object(preset_map), Value::Object(required_params_map)) =
+        (preset, required_params)
+    {
+        for (key, value) in required_params_map {
+            assert!(
+                !preset_map.contains_key(key),
+                "Required parameter already exists in the preset: {:?}",
+                key
+            );
+            preset_map.insert(key.clone(), value.clone());
+        }
+    } else {
+        panic!("Expecting JSON dictionary objects");
+    }
+}
diff --git a/crates/starknet_integration_tests/src/end_to_end_integration.rs b/crates/starknet_integration_tests/src/end_to_end_integration.rs
index d44238fa26..6a1b263cac 100644
--- a/crates/starknet_integration_tests/src/end_to_end_integration.rs
+++ b/crates/starknet_integration_tests/src/end_to_end_integration.rs
@@ -1,4 +1,5 @@
 use infra_utils::run_until::run_until;
+use infra_utils::tracing::{CustomLogger, TraceLevel};
 use mempool_test_utils::starknet_api_test_utils::{AccountId, MultiAccountTransactionGenerator};
 use papyrus_execution::execution_utils::get_nonce_at;
 use papyrus_storage::state::StateStorageReader;
@@ -43,7 +44,12 @@ async fn await_block(
     let condition = |&latest_block_number: &BlockNumber| latest_block_number >= target_block_number;
     let get_latest_block_number_closure = || async move { get_latest_block_number(storage_reader) };
 
-    run_until(interval, max_attempts, get_latest_block_number_closure, condition, None)
+    let logger = CustomLogger::new(
+        TraceLevel::Info,
+        Some("Waiting for storage to include block".to_string()),
+    );
+
+    run_until(interval, max_attempts, get_latest_block_number_closure, condition, Some(logger))
         .await
         .ok_or(())
 }
@@ -86,7 +92,7 @@ pub async fn end_to_end_integration(mut tx_generator: MultiAccountTransactionGen
     let (batcher_storage_reader, _) =
         papyrus_storage::open_storage(integration_test_setup.batcher_storage_config)
             .expect("Failed to open batcher's storage");
 
-    await_block(5000, EXPECTED_BLOCK_NUMBER, 15, &batcher_storage_reader)
+    await_block(5000, EXPECTED_BLOCK_NUMBER, 30, &batcher_storage_reader)
         .await
         .expect("Block number should have been reached.");
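
Note: the preset pipeline added in config_utils.rs can be exercised in isolation. The sketch below mirrors config_to_preset and add_required_params_to_preset on hand-written serde_json values; the dotted parameter paths and the {"description", "value"} object shape imitate what papyrus_config's dump() and combine_config_map_and_pointers emit, while the concrete keys and values here are made up for illustration.

// Standalone sketch of the preset pipeline above; keys/values are invented.
use serde_json::{json, Map, Value};

// Same contract as config_to_preset in the diff: keep each top-level key,
// replacing its dumped-object value with the inner "value" field.
fn config_to_preset(config_map: &Value) -> Value {
    let Value::Object(map) = config_map else {
        panic!("Config map is not a JSON object: {config_map:?}");
    };
    let mut result = Map::new();
    for (key, value) in map {
        if let Value::Object(inner_map) = value {
            if let Some(inner_value) = inner_map.get("value") {
                result.insert(key.clone(), inner_value.clone());
            }
        }
    }
    Value::Object(result)
}

// Same contract as add_required_params_to_preset in the diff: merge the
// required params into the preset, panicking on duplicate keys.
fn add_required_params_to_preset(preset: &mut Value, required_params: &Value) {
    let (Value::Object(preset_map), Value::Object(required_params_map)) =
        (preset, required_params)
    else {
        panic!("Expecting JSON dictionary objects");
    };
    for (key, value) in required_params_map {
        assert!(!preset_map.contains_key(key), "Duplicate key: {key:?}");
        preset_map.insert(key.clone(), value.clone());
    }
}

fn main() {
    // Stand-in for the output of combine_config_map_and_pointers: every param
    // is dumped as an object carrying its value plus metadata.
    let config_as_map = json!({
        "http_server_config.port": { "description": "HTTP port.", "value": 8080 },
        "batcher_config.storage.db_config.path_prefix":
            { "description": "DB path.", "value": "/data" }
    });
    let mut preset = config_to_preset(&config_as_map);

    // Stand-in for the config_fields_to_json! output: plain key/value pairs.
    let required = json!({ "chain_id": "SN_SEPOLIA" });
    add_required_params_to_preset(&mut preset, &required);

    assert_eq!(
        preset,
        json!({
            "http_server_config.port": 8080,
            "batcher_config.storage.db_config.path_prefix": "/data",
            "chain_id": "SN_SEPOLIA"
        })
    );
    println!("{}", serde_json::to_string_pretty(&preset).unwrap());
}

This also shows why combine_config_map_and_pointers had to become pub: the integration-test crate consumes its flattened map directly instead of hand-listing each config field in the macro call.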
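Note: the await_block bump from 15 to 30 attempts works through run_until's polling loop. The standalone sketch below shows the shape of that loop; the real run_until and CustomLogger are async and live in the repo's infra_utils crate, whose exact signatures are not shown in this diff, and the millisecond reading of the 5000 interval is an assumption (under it, 30 attempts allow up to 150 s of waiting).

// Standalone, synchronous sketch of the run_until pattern; not infra_utils' API.
use std::thread::sleep;
use std::time::Duration;

// Poll `get_value` every `interval_ms`, up to `max_attempts` times, until
// `condition` holds; returns the satisfying value, or None when exhausted.
fn run_until_sketch<T: std::fmt::Debug>(
    interval_ms: u64,
    max_attempts: usize,
    mut get_value: impl FnMut() -> T,
    condition: impl Fn(&T) -> bool,
    log_message: Option<&str>,
) -> Option<T> {
    for attempt in 1..=max_attempts {
        let value = get_value();
        if condition(&value) {
            return Some(value);
        }
        if let Some(message) = log_message {
            // Stands in for CustomLogger: trace progress on each failed attempt.
            println!("{message}: attempt {attempt}/{max_attempts}, got {value:?}");
        }
        sleep(Duration::from_millis(interval_ms));
    }
    None
}

fn main() {
    // Toy stand-in for get_latest_block_number: a counter that grows per poll.
    let mut height = 0u64;
    let result = run_until_sketch(
        10, // short interval for the demo; await_block above passes 5000
        30,
        move || {
            height += 1;
            height
        },
        |&h| h >= 5,
        Some("Waiting for storage to include block"),
    );
    assert_eq!(result, Some(5));
}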