chore(starknet_integration_tests): dump entire config to a preset file
commit-id:b6e3a6d6
Itay-Tsabary-Starkware committed Dec 18, 2024
1 parent aff6e32 commit 7db4a12
Showing 5 changed files with 166 additions and 16 deletions.
1 change: 1 addition & 0 deletions Cargo.lock


2 changes: 1 addition & 1 deletion crates/papyrus_config/src/dumping.rs
@@ -363,7 +363,7 @@ pub fn ser_pointer_target_required_param(
/// Updates entries in the map to point to these targets, replacing values of entries that match
/// the target parameter paths to contain only the name of the target they point to.
/// Fails if a param is not pointing to a same-named pointer target nor whitelisted.
pub(crate) fn combine_config_map_and_pointers(
pub fn combine_config_map_and_pointers(
mut config_map: BTreeMap<ParamPath, SerializedParam>,
pointers: &ConfigPointers,
non_pointer_params: &Pointers,
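The only change in `dumping.rs` is widening `combine_config_map_and_pointers` from `pub(crate)` to `pub`, so the integration-test crate can build the full config mapping itself. The sketch below illustrates, with plain `serde_json` values, the transformation its doc comment describes: params that point at a shared target are reduced to naming that target, and the target entry carries the value once. The param paths and the `pointer_target` key are illustrative assumptions, not the crate's actual `SerializedParam` layout.

```rust
use serde_json::json;

fn main() {
    // Before: two params carry the same value and both point at the `chain_id` target.
    let config_map = json!({
        "batcher_config.storage.db_config.chain_id": { "value": "SN_SEPOLIA" },
        "consensus_manager_config.consensus_config.chain_id": { "value": "SN_SEPOLIA" },
    });

    // After combining: pointing entries only name their target, and the target
    // entry holds the actual value once (key names here are illustrative only).
    let combined = json!({
        "chain_id": { "value": "SN_SEPOLIA" },
        "batcher_config.storage.db_config.chain_id": { "pointer_target": "chain_id" },
        "consensus_manager_config.consensus_config.chain_id": { "pointer_target": "chain_id" },
    });

    println!("{config_map:#}\n-->\n{combined:#}");
}
```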
1 change: 1 addition & 0 deletions crates/starknet_integration_tests/Cargo.toml
@@ -18,6 +18,7 @@ indexmap.workspace = true
infra_utils.workspace = true
mempool_test_utils.workspace = true
papyrus_common.workspace = true
papyrus_config.workspace = true
papyrus_consensus.workspace = true
papyrus_execution.workspace = true
papyrus_network = { workspace = true, features = ["testing"] }
168 changes: 155 additions & 13 deletions crates/starknet_integration_tests/src/config_utils.rs
@@ -3,7 +3,8 @@ use std::io::Write;
use std::net::SocketAddr;
use std::path::PathBuf;

use serde_json::{json, Value};
use papyrus_config::dumping::{combine_config_map_and_pointers, SerializeConfig};
use serde_json::{json, Map, Value};
use starknet_sequencer_infra::component_definitions::{
LocalServerConfig,
RemoteClientConfig,
@@ -16,9 +17,14 @@ use starknet_sequencer_node::config::component_execution_config::{
ReactiveComponentExecutionConfig,
ReactiveComponentExecutionMode,
};
use starknet_sequencer_node::config::node_config::SequencerNodeConfig;
use starknet_sequencer_node::config::node_config::{
SequencerNodeConfig,
CONFIG_NON_POINTERS_WHITELIST,
CONFIG_POINTERS,
};
use starknet_sequencer_node::config::test_utils::RequiredParams;
use tracing::info;

// TODO(Tsabary): Move here all config-related functions from "integration_test_utils.rs".

const NODE_CONFIG_CHANGES_FILE_PATH: &str = "node_integration_test_config_changes.json";
@@ -47,30 +53,43 @@ macro_rules! config_fields_to_json {
};
}

/// Creates a config file for the sequencer node for the end to end integration test.
/// Creates a config file for the sequencer node for an integration test.
pub(crate) fn dump_config_file_changes(
config: &SequencerNodeConfig,
required_params: RequiredParams,
dir: PathBuf,
) -> PathBuf {
// Dump config changes file for the sequencer node.
// TODO(Tsabary): auto dump the entirety of RequiredParams fields.
let json_data = config_fields_to_json!(
let required_params_json = config_fields_to_json!(
required_params.chain_id,
required_params.eth_fee_token_address,
required_params.strk_fee_token_address,
required_params.validator_id,
config.rpc_state_reader_config.url,
config.batcher_config.storage.db_config.path_prefix,
config.http_server_config.ip,
config.http_server_config.port,
config.consensus_manager_config.consensus_config.start_height,
config.state_sync_config.storage_config.db_config.path_prefix,
config.state_sync_config.network_config.tcp_port,
);
let node_config_path = dump_json_data(json_data, NODE_CONFIG_CHANGES_FILE_PATH, dir);
assert!(node_config_path.exists(), "File does not exist: {:?}", node_config_path);

// Create the entire mapping of the config and the pointers, without the required params.
let config_as_map = combine_config_map_and_pointers(
config.dump(),
&CONFIG_POINTERS,
&CONFIG_NON_POINTERS_WHITELIST,
)
.unwrap();

dump_json_data(required_params_json.clone(), "required_params_json", PathBuf::from("."));
dump_json_data(config_as_map.clone(), "config_as_map", PathBuf::from("."));

// Extract only the required fields from the config map.
let mut preset = config_to_preset(&config_as_map);
dump_json_data(preset.clone(), "preset_before", PathBuf::from("."));

// Add the required params to the preset.
add_required_params_to_preset(&mut preset, &required_params_json);
dump_json_data(preset.clone(), "preset_after", PathBuf::from("."));

// Dump the preset to a file, return its path.
let node_config_path = dump_json_data(preset, NODE_CONFIG_CHANGES_FILE_PATH, dir);
assert!(node_config_path.exists(), "File does not exist: {:?}", node_config_path);
node_config_path
}

@@ -157,3 +176,126 @@ pub async fn get_remote_flow_test_config() -> Vec<ComponentConfig> {
get_non_http_component_config(gateway_socket).await,
]
}

/// Transforms a nested JSON dictionary object into a simplified JSON dictionary object by
/// extracting specific values from the inner dictionaries.
///
/// # Parameters
/// - `config_map`: A reference to a `serde_json::Value` that must be a JSON dictionary object. Each
/// key in the object maps to another JSON dictionary object.
///
/// # Returns
/// - A `serde_json::Value` dictionary object where:
/// - Each key is preserved from the top-level dictionary.
/// - Each value corresponds to the `"value"` field of the nested JSON dictionary under the
/// original key.
///
/// # Panics
/// This function panics if the provided `config_map` is not a JSON dictionary object, with a
/// descriptive error message: ```text
/// Config map is not a JSON object: <actual value>
/// ```
///
/// # Example
/// ```rust
/// use serde_json::{json, Value};
///
/// let input = json!({
/// "setting1": { "value": "preset1", "metadata": "info1" },
/// "setting2": { "value": "preset2", "metadata": "info2" },
/// "setting3": { "metadata": "info3" }
/// });
///
/// let result = config_to_preset(&input);
/// println!("{}", result);
/// ```
///
/// The output will be:
/// ```json
/// {
/// "setting1": "preset1",
/// "setting2": "preset2"
/// }
/// ```
fn config_to_preset(config_map: &Value) -> Value {
// Ensure the config_map is a JSON object.
if let Value::Object(map) = config_map {
let mut result = Map::new();

for (key, value) in map {
if let Value::Object(inner_map) = value {
// Extract the value.
if let Some(inner_value) = inner_map.get("value") {
// Add it to the result map
result.insert(key.clone(), inner_value.clone());
}
}
}

// Return the transformed result as a JSON object.
Value::Object(result)
} else {
panic!("Config map is not a JSON object: {:?}", config_map);
}
}

/// Merges required parameters into an existing preset JSON object.
///
/// # Parameters
/// - `preset`: A mutable reference to a `serde_json::Value` representing the preset. It must be a
/// JSON dictionary object where additional parameters will be added or updated.
/// - `required_params`: A reference to a `serde_json::Value` representing the required parameters.
/// It must also be a JSON dictionary object. Its keys and values will be merged into the
/// `preset`.
///
/// # Behavior
/// - For each key-value pair in `required_params`, the pair is inserted into `preset`.
/// - If a key already exists in `preset`, its value will be overwritten by the value from
/// `required_params`.
/// - Both `preset` and `required_params` must be JSON dictionary objects; otherwise, the function
/// panics.
///
/// # Panics
/// This function panics if either `preset` or `required_params` is not a JSON dictionary object
/// with the error: ```text
/// Expecting JSON object dictionary objects
/// ```
///
/// # Example
/// ```rust
/// use serde_json::{json, Value};
///
/// let mut preset = json!({
/// "param1": "value1",
/// "param2": "value2"
/// });
///
/// let required_params = json!({
/// "param2": "updated_value2",
/// "param3": "value3"
/// });
///
/// add_required_params_to_preset(&mut preset, &required_params);
///
/// println!("{}", preset);
/// ```
///
/// The output will be:
/// ```json
/// {
/// "param1": "value1",
/// "param2": "updated_value2",
/// "param3": "value3"
/// }
/// ```
fn add_required_params_to_preset(preset: &mut Value, required_params: &Value) {
if let (Value::Object(preset_map), Value::Object(required_params_map)) =
(preset, required_params)
{
for (key, value) in required_params_map {
preset_map.insert(key.clone(), value.clone());
}
} else {
panic!("Expecting JSON object dictionary objects");
}
}
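Taken together, the new flow in `dump_config_file_changes` is: dump the full `SequencerNodeConfig`, fold in `CONFIG_POINTERS` via `combine_config_map_and_pointers`, flatten every entry to its `"value"` field with `config_to_preset`, and overlay the required params with `add_required_params_to_preset`. Since both helpers appear in full above, the self-contained sketch below re-states them verbatim and runs the last two steps on stand-in data; the input maps are invented for illustration, whereas the real flow starts from `config.dump()`.

```rust
use serde_json::{json, Map, Value};

// Copied verbatim from the diff above so the snippet compiles on its own.
fn config_to_preset(config_map: &Value) -> Value {
    if let Value::Object(map) = config_map {
        let mut result = Map::new();
        for (key, value) in map {
            if let Value::Object(inner_map) = value {
                if let Some(inner_value) = inner_map.get("value") {
                    result.insert(key.clone(), inner_value.clone());
                }
            }
        }
        Value::Object(result)
    } else {
        panic!("Config map is not a JSON object: {:?}", config_map);
    }
}

// Copied verbatim from the diff above.
fn add_required_params_to_preset(preset: &mut Value, required_params: &Value) {
    if let (Value::Object(preset_map), Value::Object(required_params_map)) =
        (preset, required_params)
    {
        for (key, value) in required_params_map {
            preset_map.insert(key.clone(), value.clone());
        }
    } else {
        panic!("Expecting JSON object dictionary objects");
    }
}

fn main() {
    // Stand-in for the combined config map: every entry nests its value.
    let config_as_map = json!({
        "http_server_config.port": { "value": 8080, "description": "HTTP server port." },
        "chain_id": { "value": "SN_MAIN", "description": "The chain id." },
    });
    // Stand-in for the separately collected required params.
    let required_params = json!({ "chain_id": "SN_SEPOLIA" });

    let mut preset = config_to_preset(&config_as_map);
    add_required_params_to_preset(&mut preset, &required_params);

    // The preset is flat, and required params override the dumped values.
    assert_eq!(
        preset,
        json!({ "http_server_config.port": 8080, "chain_id": "SN_SEPOLIA" })
    );
    println!("{preset:#}");
}
```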
10 changes: 8 additions & 2 deletions crates/starknet_integration_tests/src/end_to_end_integration.rs
@@ -1,4 +1,5 @@
use infra_utils::run_until::run_until;
use infra_utils::tracing::{CustomLogger, TraceLevel};
use mempool_test_utils::starknet_api_test_utils::{AccountId, MultiAccountTransactionGenerator};
use papyrus_execution::execution_utils::get_nonce_at;
use papyrus_storage::state::StateStorageReader;
@@ -43,7 +44,12 @@ async fn await_block(
let condition = |&latest_block_number: &BlockNumber| latest_block_number >= target_block_number;
let get_latest_block_number_closure = || async move { get_latest_block_number(storage_reader) };

run_until(interval, max_attempts, get_latest_block_number_closure, condition, None)
let logger = CustomLogger::new(
TraceLevel::Info,
Some("Waiting for storage to include block".to_string()),
);

run_until(interval, max_attempts, get_latest_block_number_closure, condition, Some(logger))
.await
.ok_or(())
}
@@ -85,7 +91,7 @@ pub async fn end_to_end_integration(mut tx_generator: MultiAccountTransactionGen
papyrus_storage::open_storage(integration_test_setup.batcher_storage_config)
.expect("Failed to open batcher's storage");

await_block(5000, EXPECTED_BLOCK_NUMBER, 15, &batcher_storage_reader)
await_block(5000, EXPECTED_BLOCK_NUMBER, 30, &batcher_storage_reader)
.await
.expect("Block number should have been reached.");

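In the integration test itself, the commit threads a `CustomLogger` into `run_until` and doubles the polling budget from 15 to 30 attempts (the 5000 presumably being the poll interval in milliseconds, i.e. up to roughly 150 seconds of waiting for the target block). The snippet below is a simplified, synchronous stand-in for that polling pattern, not the actual `infra_utils::run_until` API; its signature and the string logger are assumptions made to keep the example self-contained.

```rust
// A hypothetical, synchronous sketch of the polling pattern used by `await_block`:
// poll a value every `interval_ms`, up to `max_attempts` times, logging progress
// until `condition` holds or the budget is exhausted.
use std::thread::sleep;
use std::time::Duration;

fn run_until<T: std::fmt::Debug>(
    interval_ms: u64,
    max_attempts: usize,
    mut get_value: impl FnMut() -> T,
    condition: impl Fn(&T) -> bool,
    log_message: Option<&str>,
) -> Option<T> {
    for attempt in 1..=max_attempts {
        let value = get_value();
        if condition(&value) {
            return Some(value);
        }
        if let Some(message) = log_message {
            println!("{message}: attempt {attempt}/{max_attempts}, current value {value:?}");
        }
        sleep(Duration::from_millis(interval_ms));
    }
    None
}

fn main() {
    // Pretend the "latest block number" advances by one on each poll.
    let mut latest_block = 0_u64;
    let reached = run_until(
        5,  // the real test polls far more slowly (5000 ms in the diff above)
        30, // the commit raises the budget from 15 to 30 attempts
        || {
            latest_block += 1;
            latest_block
        },
        |&block| block >= 10,
        Some("Waiting for storage to include block"),
    );
    assert_eq!(reached, Some(10));
}
```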
