diff --git a/Cargo.lock b/Cargo.lock index 539fab75d6f..1d2cca94dc1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1369,7 +1369,7 @@ dependencies = [ [[package]] name = "cf-engine-dylib" -version = "1.7.0" +version = "1.8.0" dependencies = [ "chainflip-engine", "engine-proc-macros", @@ -1620,7 +1620,7 @@ dependencies = [ [[package]] name = "chainflip-api" -version = "1.7.0" +version = "1.8.0" dependencies = [ "anyhow", "async-trait", @@ -1667,7 +1667,7 @@ dependencies = [ [[package]] name = "chainflip-broker-api" -version = "1.7.0" +version = "1.8.0" dependencies = [ "anyhow", "chainflip-api", @@ -1688,7 +1688,7 @@ dependencies = [ [[package]] name = "chainflip-cli" -version = "1.7.0" +version = "1.8.0" dependencies = [ "anyhow", "bigdecimal", @@ -1709,7 +1709,7 @@ dependencies = [ [[package]] name = "chainflip-engine" -version = "1.7.0" +version = "1.8.0" dependencies = [ "anyhow", "async-broadcast 0.5.1", @@ -1846,7 +1846,7 @@ dependencies = [ [[package]] name = "chainflip-lp-api" -version = "1.7.0" +version = "1.8.0" dependencies = [ "anyhow", "cf-primitives", @@ -1872,7 +1872,7 @@ dependencies = [ [[package]] name = "chainflip-node" -version = "1.7.0" +version = "1.8.0" dependencies = [ "cf-chains", "cf-primitives", @@ -3260,7 +3260,7 @@ checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" [[package]] name = "engine-proc-macros" -version = "1.7.0" +version = "1.8.0" dependencies = [ "engine-upgrade-utils", "proc-macro2", @@ -3270,7 +3270,7 @@ dependencies = [ [[package]] name = "engine-runner" -version = "1.7.0" +version = "1.8.0" dependencies = [ "anyhow", "assert_cmd", @@ -13173,7 +13173,7 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "state-chain-runtime" -version = "1.7.0" +version = "1.8.0" dependencies = [ "cf-amm", "cf-chains", diff --git a/api/bin/chainflip-broker-api/Cargo.toml b/api/bin/chainflip-broker-api/Cargo.toml index 153d1ec7ac5..d67f1bd6bd2 100644 --- a/api/bin/chainflip-broker-api/Cargo.toml +++ b/api/bin/chainflip-broker-api/Cargo.toml @@ -1,7 +1,7 @@ [package] authors = ["Chainflip team "] name = "chainflip-broker-api" -version = "1.7.0" +version = "1.8.0" edition = "2021" [package.metadata.deb] diff --git a/api/bin/chainflip-cli/Cargo.toml b/api/bin/chainflip-cli/Cargo.toml index bb93b70b218..3c5c73159de 100644 --- a/api/bin/chainflip-cli/Cargo.toml +++ b/api/bin/chainflip-cli/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Chainflip team "] edition = '2021' build = 'build.rs' name = "chainflip-cli" -version = "1.7.0" +version = "1.8.0" [lints] workspace = true diff --git a/api/bin/chainflip-lp-api/Cargo.toml b/api/bin/chainflip-lp-api/Cargo.toml index 0a70e548957..5144e9b5f8e 100644 --- a/api/bin/chainflip-lp-api/Cargo.toml +++ b/api/bin/chainflip-lp-api/Cargo.toml @@ -1,7 +1,7 @@ [package] authors = ["Chainflip team "] name = "chainflip-lp-api" -version = "1.7.0" +version = "1.8.0" edition = "2021" [package.metadata.deb] diff --git a/api/lib/Cargo.toml b/api/lib/Cargo.toml index 25573c5dd18..4233323f685 100644 --- a/api/lib/Cargo.toml +++ b/api/lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "chainflip-api" -version = "1.7.0" +version = "1.8.0" edition = "2021" [lints] diff --git a/engine-dylib/Cargo.toml b/engine-dylib/Cargo.toml index 37e69e14221..8d5765abf8e 100644 --- a/engine-dylib/Cargo.toml +++ b/engine-dylib/Cargo.toml @@ -3,11 +3,11 @@ authors = ["Chainflip team "] build = 'build.rs' edition = '2021' name = "cf-engine-dylib" -version = "1.7.0" +version = "1.8.0" [lib] crate-type = 
["cdylib"] -name = "chainflip_engine_v1_7_0" +name = "chainflip_engine_v1_8_0" path = 'src/lib.rs' [dependencies] diff --git a/engine-proc-macros/Cargo.toml b/engine-proc-macros/Cargo.toml index 10307db4926..287bf294220 100644 --- a/engine-proc-macros/Cargo.toml +++ b/engine-proc-macros/Cargo.toml @@ -5,7 +5,7 @@ edition = '2021' name = "engine-proc-macros" # The version here is the version that will be used for the generated code, and therefore will be the # suffix of the generated engine entrypoint. TODO: Fix this. -version = "1.7.0" +version = "1.8.0" [lib] proc-macro = true diff --git a/engine-runner-bin/Cargo.toml b/engine-runner-bin/Cargo.toml index 74235995879..d42f501c858 100644 --- a/engine-runner-bin/Cargo.toml +++ b/engine-runner-bin/Cargo.toml @@ -2,7 +2,7 @@ name = "engine-runner" description = "The central runner for the chainflip engine, it requires two shared library versions to run." # NB: When updating this version, you must update the debian assets appropriately too. -version = "1.7.0" +version = "1.8.0" authors = ["Chainflip team "] build = 'build.rs' edition = '2021' @@ -22,19 +22,19 @@ assets = [ # to specify this. We do this in the `chainflip-engine.service` files, so the user does not need to set it # manually. [ - "target/release/libchainflip_engine_v1_7_0.so", + "target/release/libchainflip_engine_v1_8_0.so", # This is the path where the engine dylib is searched for on linux. # As set in the build.rs file. - "usr/lib/chainflip-engine/libchainflip_engine_v1_7_0.so", + "usr/lib/chainflip-engine/libchainflip_engine_v1_8_0.so", "755", ], # The old version gets put into target/release by the package github actions workflow. # It downloads the correct version from the releases page. [ - "target/release/libchainflip_engine_v1_6_7.so", + "target/release/libchainflip_engine_v1_7_0.so", # This is the path where the engine dylib is searched for on linux. # As set in the build.rs file. - "usr/lib/chainflip-engine/libchainflip_engine_v1_6_7.so", + "usr/lib/chainflip-engine/libchainflip_engine_v1_7_0.so", "755", ], ] diff --git a/engine-runner-bin/src/main.rs b/engine-runner-bin/src/main.rs index f1314404a37..5b0fb96ad59 100644 --- a/engine-runner-bin/src/main.rs +++ b/engine-runner-bin/src/main.rs @@ -2,7 +2,7 @@ use engine_upgrade_utils::{CStrArray, NEW_VERSION, OLD_VERSION}; // Declare the entrypoints into each version of the engine mod old { - #[engine_proc_macros::link_engine_library_version("1.6.7")] + #[engine_proc_macros::link_engine_library_version("1.7.0")] extern "C" { pub fn cfe_entrypoint( c_args: engine_upgrade_utils::CStrArray, @@ -12,7 +12,7 @@ mod old { } mod new { - #[engine_proc_macros::link_engine_library_version("1.7.0")] + #[engine_proc_macros::link_engine_library_version("1.8.0")] extern "C" { fn cfe_entrypoint( c_args: engine_upgrade_utils::CStrArray, diff --git a/engine-upgrade-utils/src/lib.rs b/engine-upgrade-utils/src/lib.rs index b9c0296e152..0587a49d6ac 100644 --- a/engine-upgrade-utils/src/lib.rs +++ b/engine-upgrade-utils/src/lib.rs @@ -10,8 +10,8 @@ pub mod build_helpers; // rest of the places the version needs changing on build using the build scripts in each of the // relevant crates. // Should also check that the compatibility function below `args_compatible_with_old` is correct. 
-pub const OLD_VERSION: &str = "1.6.7"; -pub const NEW_VERSION: &str = "1.7.0"; +pub const OLD_VERSION: &str = "1.7.0"; +pub const NEW_VERSION: &str = "1.8.0"; pub const ENGINE_LIB_PREFIX: &str = "chainflip_engine_v"; pub const ENGINE_ENTRYPOINT_PREFIX: &str = "cfe_entrypoint_v"; diff --git a/engine/Cargo.toml b/engine/Cargo.toml index 14187d00fc4..9575a9542da 100644 --- a/engine/Cargo.toml +++ b/engine/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Chainflip team "] build = 'build.rs' edition = '2021' name = "chainflip-engine" -version = "1.7.0" +version = "1.8.0" [lib] crate-type = ["lib"] diff --git a/state-chain/cf-integration-tests/src/lib.rs b/state-chain/cf-integration-tests/src/lib.rs index 0f5c09ffa18..0c456ad8d57 100644 --- a/state-chain/cf-integration-tests/src/lib.rs +++ b/state-chain/cf-integration-tests/src/lib.rs @@ -11,7 +11,6 @@ mod authorities; mod funding; mod genesis; mod governance; -mod migrations; mod new_epoch; mod solana; mod swapping; diff --git a/state-chain/cf-integration-tests/src/migrations/serialize_solana_broadcast.rs b/state-chain/cf-integration-tests/src/migrations/serialize_solana_broadcast.rs deleted file mode 100644 index be7b0d89b6e..00000000000 --- a/state-chain/cf-integration-tests/src/migrations/serialize_solana_broadcast.rs +++ /dev/null @@ -1,154 +0,0 @@ -use cf_chains::sol::{ - sol_tx_core::{CompiledInstruction, MessageHeader}, - SolMessage, SolPubkey, SolSignature, -}; - -use cf_chains::sol::{SolHash, SolanaTransactionData}; -use genesis::with_test_defaults; -use sp_runtime::AccountId32; - -use frame_support::traits::OnRuntimeUpgrade; -use pallet_cf_broadcast::BroadcastData; -use state_chain_runtime::{ - migrations::serialize_solana_broadcast::{self, old, SerializeSolanaBroadcastMigration}, - SolanaInstance, -}; - -use crate::*; - -use cf_chains::sol::SolTransaction; - -// Test data pulled from `state-chain/chains/src/sol/sol_tx_core.rs` -#[test] -fn test_migration() { - with_test_defaults().build().execute_with(|| { - let tx: SolTransaction = SolTransaction { - signatures: vec![ - SolSignature(hex_literal::hex!( - "d1144b223b6b600de4b2d96bdceb03573a3e9781953e4c668c57e505f017859d96543243b4d904dc2f02f2f5ab5db7ba4551c7e015e64078add4674ac2e7460c" - )), - ], - message: SolMessage { - header: MessageHeader { - num_required_signatures: 1, - num_readonly_signed_accounts: 0, - num_readonly_unsigned_accounts: 8, - }, - account_keys: vec![ - SolPubkey(hex_literal::hex!( - "2e8944a76efbece296221e736627f4528a947578263a1172a9786410702d2ef2" - )), - SolPubkey(hex_literal::hex!( - "22020a74fd97df45db96d2bbf4e485ccbec56945155ff8f668856be26c9de4a9" - )), - SolPubkey(hex_literal::hex!( - "79c03bceb9ddea819e956b2b332e87fbbf49fc8968df78488e88cfaa366f3036" - )), - SolPubkey(hex_literal::hex!( - "8cd28baa84f2067bbdf24513c2d44e44bf408f2e6da6e60762e3faa4a62a0adb" - )), - SolPubkey(hex_literal::hex!( - "8d9871ed5fb2ee05765af23b7cabcc0d6b08ed370bb9f616a0d4dea40a25f870" - )), - SolPubkey(hex_literal::hex!( - "b5b9d633289c8fd72fb05f33349bf4cc44e82add5d865311ae346d7c9a67b7dd" - )), - SolPubkey(hex_literal::hex!( - "f53a2f4350451db5595a75e231519bc2758798f72550e57487722e7cbe954dbc" - )), - SolPubkey(hex_literal::hex!( - "0000000000000000000000000000000000000000000000000000000000000000" - )), - SolPubkey(hex_literal::hex!( - "0306466fe5211732ffecadba72c39be7bc8ce5bbc5f7126b2c439b3a40000000" - )), - SolPubkey(hex_literal::hex!( - "06a7d517192c568ee08a845f73d29788cf035c3145b21ab344d8062ea9400000" - )), - SolPubkey(hex_literal::hex!( - 
"06ddf6e1d765a193d9cbe146ceeb79ac1cb485ed5f5b37913a8cf5857eff00a9" - )), - SolPubkey(hex_literal::hex!( - "0fb9ba52b1f09445f1e3a7508d59f0797923acf744fbe2da303fb06da859ee87" - )), - SolPubkey(hex_literal::hex!( - "72b5d2051d300b10b74314b7e25ace9998ca66eb2c7fbc10ef130dd67028293c" - )), - SolPubkey(hex_literal::hex!( - "a140fd3d05766f0087d57bf99df05731e894392ffcc8e8d7e960ba73c09824aa" - )), - SolPubkey(hex_literal::hex!( - "a1e031c8bc9bec3b610cf7b36eb3bf3aa40237c9e5be2c7893878578439eb00b" - )), - ], - recent_blockhash: SolHash(hex_literal::hex!( - "f7f02ac4729abaa97c01aa6526ba909c3bcb16c7f47c7e13dfdc5a1b15f647b4" - )) - .into(), - instructions: vec![ - CompiledInstruction { - program_id_index: 7, - accounts: hex_literal::hex!("030900").to_vec(), - data: hex_literal::hex!("04000000").to_vec(), - }, - CompiledInstruction { - program_id_index: 8, - accounts: vec![], - data: hex_literal::hex!("030a00000000000000").to_vec(), - }, - CompiledInstruction { - program_id_index: 8, - accounts: vec![], - data: hex_literal::hex!("0233620100").to_vec(), - }, - CompiledInstruction { - program_id_index: 12, - accounts: hex_literal::hex!("0e00040507").to_vec(), - data: hex_literal::hex!("8e24658f6c59298c080000000100000000000000ff").to_vec(), - }, - CompiledInstruction { - program_id_index: 12, - accounts: hex_literal::hex!("0e000d01020b0a0607").to_vec(), - data: hex_literal::hex!("494710642cb0c646080000000200000000000000ff06").to_vec(), - }, - ], - }, - }; - - old::AwaitingBroadcast::insert( - 22, - old::SolanaBroadcastData { - broadcast_id: 22, - transaction_payload: tx, - threshold_signature_payload: SolMessage::default(), - transaction_out_id: SolSignature::default(), - nominee: Some(AccountId32::from([11; 32])), - }, - ); - - let state = serialize_solana_broadcast::pre_upgrade_check().unwrap(); - SerializeSolanaBroadcastMigration::on_runtime_upgrade(); - serialize_solana_broadcast::post_upgrade_check(state).unwrap(); - - let expected_serialized_tx = hex_literal::hex!("01d1144b223b6b600de4b2d96bdceb03573a3e9781953e4c668c57e505f017859d96543243b4d904dc2f02f2f5ab5db7ba4551c7e015e64078add4674ac2e7460c0100080f2e8944a76efbece296221e736627f4528a947578263a1172a9786410702d2ef222020a74fd97df45db96d2bbf4e485ccbec56945155ff8f668856be26c9de4a979c03bceb9ddea819e956b2b332e87fbbf49fc8968df78488e88cfaa366f30368cd28baa84f2067bbdf24513c2d44e44bf408f2e6da6e60762e3faa4a62a0adb8d9871ed5fb2ee05765af23b7cabcc0d6b08ed370bb9f616a0d4dea40a25f870b5b9d633289c8fd72fb05f33349bf4cc44e82add5d865311ae346d7c9a67b7ddf53a2f4350451db5595a75e231519bc2758798f72550e57487722e7cbe954dbc00000000000000000000000000000000000000000000000000000000000000000306466fe5211732ffecadba72c39be7bc8ce5bbc5f7126b2c439b3a4000000006a7d517192c568ee08a845f73d29788cf035c3145b21ab344d8062ea940000006ddf6e1d765a193d9cbe146ceeb79ac1cb485ed5f5b37913a8cf5857eff00a90fb9ba52b1f09445f1e3a7508d59f0797923acf744fbe2da303fb06da859ee8772b5d2051d300b10b74314b7e25ace9998ca66eb2c7fbc10ef130dd67028293ca140fd3d05766f0087d57bf99df05731e894392ffcc8e8d7e960ba73c09824aaa1e031c8bc9bec3b610cf7b36eb3bf3aa40237c9e5be2c7893878578439eb00bf7f02ac4729abaa97c01aa6526ba909c3bcb16c7f47c7e13dfdc5a1b15f647b40507030309000404000000080009030a0000000000000008000502336201000c050e00040507158e24658f6c59298c080000000100000000000000ff0c090e000d01020b0a060716494710642cb0c646080000000200000000000000ff06").to_vec(); - - let mut broadcast_iter = - pallet_cf_broadcast::AwaitingBroadcast::::iter(); - let (first_broadcast_id, first_broadcast_data) = broadcast_iter.next().unwrap(); - 
assert!(broadcast_iter.next().is_none()); - - assert_eq!(first_broadcast_id, 22); - assert_eq!( - first_broadcast_data, - BroadcastData { - broadcast_id: 22, - transaction_payload: SolanaTransactionData { - serialized_transaction: expected_serialized_tx, - }, - threshold_signature_payload: SolMessage::default(), - transaction_out_id: SolSignature::default(), - nominee: Some(AccountId32::from([11; 32])), - } - ); - }); -} diff --git a/state-chain/node/Cargo.toml b/state-chain/node/Cargo.toml index 7e0b717a15a..696f3c78762 100644 --- a/state-chain/node/Cargo.toml +++ b/state-chain/node/Cargo.toml @@ -8,7 +8,7 @@ license = '' name = 'chainflip-node' publish = false repository = 'https://github.com/chainflip-io/chainflip-backend' -version = "1.7.0" +version = "1.8.0" [[bin]] name = 'chainflip-node' diff --git a/state-chain/pallets/cf-broadcast/src/migrations.rs b/state-chain/pallets/cf-broadcast/src/migrations.rs index 1d9cadbd3ed..455a20ceb46 100644 --- a/state-chain/pallets/cf-broadcast/src/migrations.rs +++ b/state-chain/pallets/cf-broadcast/src/migrations.rs @@ -1,13 +1,4 @@ use crate::Pallet; -use cf_runtime_upgrade_utilities::{PlaceholderMigration, VersionedMigration}; +use cf_runtime_upgrade_utilities::PlaceholderMigration; -mod initialize_broadcast_timeout_storage; -mod migrate_timeouts; -pub mod remove_aborted_broadcasts; - -pub type PalletMigration = ( - VersionedMigration, initialize_broadcast_timeout_storage::Migration, 6, 7>, - VersionedMigration, migrate_timeouts::Migration, 7, 8>, - PlaceholderMigration, 8>, - // Migration 8->9 is SerializeSolanaBroadcastMigration in runtime lib. -); +pub type PalletMigration = (PlaceholderMigration, 9>,); diff --git a/state-chain/pallets/cf-broadcast/src/migrations/initialize_broadcast_timeout_storage.rs b/state-chain/pallets/cf-broadcast/src/migrations/initialize_broadcast_timeout_storage.rs deleted file mode 100644 index 98237f9051a..00000000000 --- a/state-chain/pallets/cf-broadcast/src/migrations/initialize_broadcast_timeout_storage.rs +++ /dev/null @@ -1,92 +0,0 @@ -use frame_support::{traits::OnRuntimeUpgrade, weights::Weight}; -use old::maybe_get_timeout_for_type; - -use crate::*; - -// Constants copied from `runtime/src/constants.rs`, -// in order to use same timeout values as given in `node/src/chain_spec.rs` -pub const MILLISECONDS_PER_BLOCK_ETHEREUM: u32 = 14 * 1000; -pub const MILLISECONDS_PER_BLOCK_POLKADOT: u32 = 6 * 1000; -pub const MILLISECONDS_PER_BLOCK_ARBITRUM: u32 = 250; -pub const MILLISECONDS_PER_BLOCK_SOLANA: u32 = 400; - -pub const BLOCKS_PER_MINUTE_ETHEREUM: u32 = 60000 / MILLISECONDS_PER_BLOCK_ETHEREUM; -pub const BLOCKS_PER_MINUTE_POLKADOT: u32 = 60000 / MILLISECONDS_PER_BLOCK_POLKADOT; -pub const BLOCKS_PER_MINUTE_ARBITRUM: u32 = 60000 / MILLISECONDS_PER_BLOCK_ARBITRUM; -pub const BLOCKS_PER_MINUTE_SOLANA: u32 = 60000 / MILLISECONDS_PER_BLOCK_SOLANA; - -mod old { - use cf_primitives::BlockNumber; - - use super::*; - - // Same timeout values as previously defined in `#[pallet::constant]`s - // and same as currently used in `node/src/chain_spec.rs` - pub const ETHEREUM_BROADCAST_TIMEOUT: BlockNumber = 5 * BLOCKS_PER_MINUTE_ETHEREUM; // note, due to rounding, this is effectively ~4.7 min - pub const POLKADOT_BROADCAST_TIMEOUT: BlockNumber = 4 * BLOCKS_PER_MINUTE_POLKADOT; - pub const BITCOIN_BROADCAST_TIMEOUT: BlockNumber = 9; - pub const ARBITRUM_BROADCAST_TIMEOUT: BlockNumber = 2 * BLOCKS_PER_MINUTE_ARBITRUM; - pub const SOLANA_BROADCAST_TIMEOUT: BlockNumber = 4 * BLOCKS_PER_MINUTE_SOLANA; - - // For testing purposes 
we also have to set the timeout for the mock configuration, - // following `BROADCAST_EXPIRY_BLOCKS` in `mock.rs` - pub const MOCK_ETHEREUM_BROADCAST_TIMEOUT: BlockNumber = 4; - - pub fn maybe_get_timeout_for_type, I: 'static>( - ) -> Option> { - // Choose timeout value based on statically defined chain name. - // It should be the same as the previously used constants. - let timeout: ChainBlockNumberFor = match T::TargetChain::NAME { - "Ethereum" => old::ETHEREUM_BROADCAST_TIMEOUT, - "Polkadot" => old::POLKADOT_BROADCAST_TIMEOUT, - "Bitcoin" => old::BITCOIN_BROADCAST_TIMEOUT, - "Arbitrum" => old::ARBITRUM_BROADCAST_TIMEOUT, - "Solana" => old::SOLANA_BROADCAST_TIMEOUT, - "MockEthereum" => old::MOCK_ETHEREUM_BROADCAST_TIMEOUT, - _ => return None, // skip migration for unexpected chain name - } - .into(); - Some(timeout) - } -} - -pub struct Migration, I: 'static>(PhantomData<(T, I)>); - -impl, I: 'static> OnRuntimeUpgrade for Migration { - fn on_runtime_upgrade() -> Weight { - if let Some(timeout) = maybe_get_timeout_for_type::() { - BroadcastTimeout::::set(timeout); - } - - Weight::zero() - } - - #[cfg(feature = "try-runtime")] - fn post_upgrade(_state: Vec) -> Result<(), DispatchError> { - assert_eq!(BroadcastTimeout::::get(), maybe_get_timeout_for_type::().unwrap()); - Ok(()) - } -} - -#[cfg(test)] -mod migration_tests { - - #[test] - fn test_migration() { - use super::*; - use crate::mock::*; - - new_test_ext().execute_with(|| { - // Perform runtime migration. - super::Migration::::on_runtime_upgrade(); - #[cfg(feature = "try-runtime")] - super::Migration::::post_upgrade(vec![]).unwrap(); - - // Storage is initialized correctly - assert_eq!( - crate::BroadcastTimeout::::get(), - maybe_get_timeout_for_type::().unwrap() - ); - }); - } -} diff --git a/state-chain/pallets/cf-broadcast/src/migrations/migrate_timeouts.rs b/state-chain/pallets/cf-broadcast/src/migrations/migrate_timeouts.rs deleted file mode 100644 index 90a8ee32adb..00000000000 --- a/state-chain/pallets/cf-broadcast/src/migrations/migrate_timeouts.rs +++ /dev/null @@ -1,107 +0,0 @@ -use frame_support::{pallet_prelude::ValueQuery, traits::OnRuntimeUpgrade, weights::Weight}; - -use crate::*; - -mod old { - use super::*; - - #[frame_support::storage_alias] - pub type Timeouts, I: 'static> = StorageMap< - Pallet, - Twox64Concat, - BlockNumberFor, - BTreeSet<(BroadcastId, ::ValidatorId)>, - ValueQuery, - >; -} - -pub struct Migration, I: 'static>(PhantomData<(T, I)>); - -impl, I: 'static> OnRuntimeUpgrade for Migration { - fn on_runtime_upgrade() -> Weight { - // Instead of trying to translate the previous timeout into external chain blocks, - // we simply reset the remaining timeout duration to the new `BroadcastTimeout` value. 
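To make the reset concrete (illustrative numbers only, not taken from any network): with a tracked external-chain height of 1_000 and a BroadcastTimeout of 20 blocks, every pending broadcast is re-scheduled to time out at block 1_020, regardless of its original deadline. A minimal sketch of that arithmetic:

// Illustration only: the removed migration collapsed every pending timeout onto
// one new deadline relative to the current external-chain height.
fn reset_deadline(current_height: u64, broadcast_timeout: u64) -> u64 {
    current_height + broadcast_timeout
}

fn main() {
    assert_eq!(reset_deadline(1_000, 20), 1_020);
}
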
- let new_timeout = T::ChainTracking::get_block_height() + BroadcastTimeout::::get(); - for (_, timeouts) in old::Timeouts::::drain() { - for (broadcast_id, nominee) in timeouts { - Timeouts::::append((new_timeout, broadcast_id, nominee)) - } - } - Weight::zero() - } - - #[cfg(feature = "try-runtime")] - fn pre_upgrade() -> Result, DispatchError> { - let mut timeouts = Vec::new(); - for (_, old_broadcast_ids) in old::Timeouts::::iter() { - for (old_broadcast_id, old_nominee) in old_broadcast_ids { - timeouts.push((old_broadcast_id, old_nominee)) - } - } - let data: MigrationData = MigrationData { - timeouts, - target_chainblock: T::ChainTracking::get_block_height() + - BroadcastTimeout::::get(), - }; - Ok(data.encode()) - } - - #[cfg(feature = "try-runtime")] - fn post_upgrade(state: Vec) -> Result<(), DispatchError> { - let data = MigrationData::::decode(&mut &state[..]).unwrap(); - let new_timeouts = Timeouts::::get(); - - // We don't know whether the timeout is set to exactly the `new_timeout` value or a higher - // one, because between getting the current block height in `pre_upgrade` and in - // `on_runtime_upgrade` some time might have passed. - for (broadcast_id, nominee) in data.timeouts { - let (new_timeout, _, _) = new_timeouts - .iter() - .find(|(_, id, nom)| (id, nom) == (&broadcast_id, &nominee)) - .unwrap(); - assert!(*new_timeout >= data.target_chainblock); - } - - // Make sure that the old map is empty - assert!(old::Timeouts::::iter().next().is_none()); - - Ok(()) - } -} - -#[derive(Encode, Decode)] -pub struct MigrationData, I: 'static> { - pub timeouts: Vec<(BroadcastId, ::ValidatorId)>, - pub target_chainblock: ChainBlockNumberFor, -} - -#[cfg(test)] -mod migration_tests { - #[test] - fn test_migration() { - use super::*; - use crate::mock::*; - - new_test_ext().execute_with(|| { - let target = frame_system::Pallet::::block_number() + - BroadcastTimeout::::get(); - - // Create a few timeouts to migrate - old::Timeouts::::set(target, BTreeSet::from([(0, 100), (1, 101), (3, 102)])); - old::Timeouts::::set(target + 1, BTreeSet::from([(4, 103), (5, 104)])); - - #[cfg(feature = "try-runtime")] - let state = super::Migration::::pre_upgrade().unwrap(); - - // increment block height - let new_height = >::ChainTracking::get_block_height() + 20; - >::ChainTracking::set_block_height(new_height); - - // Perform runtime migration. 
- super::Migration::::on_runtime_upgrade(); - - #[cfg(feature = "try-runtime")] - super::Migration::::post_upgrade(state).unwrap(); - }); - } -} diff --git a/state-chain/pallets/cf-broadcast/src/migrations/remove_aborted_broadcasts.rs b/state-chain/pallets/cf-broadcast/src/migrations/remove_aborted_broadcasts.rs deleted file mode 100644 index 3161b132771..00000000000 --- a/state-chain/pallets/cf-broadcast/src/migrations/remove_aborted_broadcasts.rs +++ /dev/null @@ -1,24 +0,0 @@ -use crate::*; - -// Highest stale aborted broadcasts as of 3/10/2024: -// Mainnet -pub const ETHEREUM_MAX_ABORTED_BROADCAST_BERGHAIN: BroadcastId = 11592; -pub const ARBITRUM_MAX_ABORTED_BROADCAST_BERGHAIN: BroadcastId = 426; -// Perseverance testnet -pub const ETHEREUM_MAX_ABORTED_BROADCAST_PERSEVERANCE: BroadcastId = 1609; -pub const ARBITRUM_MAX_ABORTED_BROADCAST_PERSEVERANCE: BroadcastId = 665; -pub const POLKADOT_MAX_ABORTED_BROADCAST_PERSEVERANCE: BroadcastId = 634; - -pub fn remove_stale_and_all_older, I: 'static>(latest_stale_broadcast: BroadcastId) { - AbortedBroadcasts::::mutate(|aborted| { - aborted.retain(|id| id > &latest_stale_broadcast); - }); -} - -#[cfg(feature = "try-runtime")] -pub fn assert_removed, I: 'static>(latest_stale_broadcast: BroadcastId) { - let aborted_broadcasts = AbortedBroadcasts::::get(); - if let Some(first) = aborted_broadcasts.first() { - assert!(*first > latest_stale_broadcast, "Aborted broadcast {first} was not removed"); - } -} diff --git a/state-chain/runtime/Cargo.toml b/state-chain/runtime/Cargo.toml index 57fdc6ed7b0..f71dc0f6298 100644 --- a/state-chain/runtime/Cargo.toml +++ b/state-chain/runtime/Cargo.toml @@ -1,6 +1,6 @@ [package] name = 'state-chain-runtime' -version = '1.7.0' +version = '1.8.0' authors = ['Chainflip Team '] edition = '2021' homepage = 'https://chainflip.io' diff --git a/state-chain/runtime/src/lib.rs b/state-chain/runtime/src/lib.rs index 81226256aac..4251a67ec31 100644 --- a/state-chain/runtime/src/lib.rs +++ b/state-chain/runtime/src/lib.rs @@ -18,7 +18,6 @@ use crate::{ }, Offence, }, - migrations::serialize_solana_broadcast::{NoopUpgrade, SerializeSolanaBroadcastMigration}, monitoring_apis::{ ActivateKeysBroadcastIds, AuthoritiesInfo, BtcUtxos, EpochState, ExternalChainsBlockHeight, FeeImbalance, FlipSupply, LastRuntimeUpgradeInfo, MonitoringData, OpenDepositChannels, @@ -50,7 +49,6 @@ use cf_chains::{ Arbitrum, Bitcoin, DefaultRetryPolicy, ForeignChain, Polkadot, Solana, TransactionBuilder, }; use cf_primitives::{BroadcastId, EpochIndex, NetworkEnvironment, STABLE_ASSET}; -use cf_runtime_upgrade_utilities::VersionedMigration; use cf_traits::{ AdjustedFeeEstimationApi, AssetConverter, BalanceApi, DummyEgressSuccessWitnesser, DummyIngressSource, GetBlockHeight, NoLimit, SwapLimits, SwapLimitsProvider, @@ -59,7 +57,6 @@ use codec::{alloc::string::ToString, Decode, Encode}; use core::ops::Range; use frame_support::{derive_impl, instances::*}; pub use frame_system::Call as SystemCall; -use migrations::add_liveness_electoral_system_solana::LivenessSettingsMigration; use pallet_cf_governance::GovCallHash; use pallet_cf_ingress_egress::{ ChannelAction, DepositWitness, IngressOrEgress, OwedAmount, TargetChainAsset, @@ -197,10 +194,10 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { spec_name: create_runtime_str!("chainflip-node"), impl_name: create_runtime_str!("chainflip-node"), authoring_version: 1, - spec_version: 170, + spec_version: 180, impl_version: 1, apis: RUNTIME_API_VERSIONS, - transaction_version: 12, + transaction_version: 13, 
state_version: 1, }; @@ -1225,7 +1222,7 @@ type AllMigrations = ( // UPGRADE pallet_cf_environment::migrations::VersionUpdate, PalletMigrations, - MigrationsForV1_7, + MigrationsForV1_8, migrations::housekeeping::Migration, migrations::reap_old_accounts::Migration, ); @@ -1269,25 +1266,7 @@ type PalletMigrations = ( pallet_cf_cfe_interface::migrations::PalletMigration, ); -type MigrationsForV1_7 = ( - // Only the Solana Transaction type has changed - VersionedMigration< - pallet_cf_broadcast::Pallet, - SerializeSolanaBroadcastMigration, - 8, - 9, - >, - VersionedMigration, NoopUpgrade, 8, 9>, - VersionedMigration, NoopUpgrade, 8, 9>, - VersionedMigration, NoopUpgrade, 8, 9>, - VersionedMigration, NoopUpgrade, 8, 9>, - VersionedMigration< - pallet_cf_elections::Pallet, - LivenessSettingsMigration, - 0, - 1, - >, -); +type MigrationsForV1_8 = (); #[cfg(feature = "runtime-benchmarks")] #[macro_use] diff --git a/state-chain/runtime/src/migrations.rs b/state-chain/runtime/src/migrations.rs index ba3424202fd..187dd5164c2 100644 --- a/state-chain/runtime/src/migrations.rs +++ b/state-chain/runtime/src/migrations.rs @@ -1,6 +1,4 @@ //! Chainflip runtime storage migrations. -pub mod add_liveness_electoral_system_solana; pub mod housekeeping; pub mod reap_old_accounts; -pub mod serialize_solana_broadcast; diff --git a/state-chain/runtime/src/migrations/add_liveness_electoral_system_solana.rs b/state-chain/runtime/src/migrations/add_liveness_electoral_system_solana.rs deleted file mode 100644 index 9773b7cf2f5..00000000000 --- a/state-chain/runtime/src/migrations/add_liveness_electoral_system_solana.rs +++ /dev/null @@ -1,39 +0,0 @@ -use crate::*; -use frame_support::{pallet_prelude::Weight, storage::unhashed, traits::OnRuntimeUpgrade}; -use frame_system::pallet_prelude::BlockNumberFor; - -use pallet_cf_elections::{electoral_system::ElectoralSystem, Config, ElectoralSettings}; -#[cfg(feature = "try-runtime")] -use sp_runtime::DispatchError; - -use codec::{Decode, Encode}; - -pub struct LivenessSettingsMigration; - -const LIVENESS_CHECK_DURATION: BlockNumberFor = 10; - -// Because the Liveness electoral system is added to the end, and the rest of its types are the same -// we can simply append the encoded bytes to the raw storage. 
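The append trick described in the comment above works because SCALE encodes a tuple as the plain concatenation of its fields' encodings, so extending the old raw bytes with the encoded new setting yields a valid encoding of the widened settings tuple. A self-contained sketch using parity-scale-codec (aliased as `codec`, as in the runtime); the u32/bool fields are stand-ins, not the real ElectoralSettings types:

use codec::{Decode, Encode};

fn main() {
    // Pretend the old settings value was this tuple...
    let old_settings: (u32, bool) = (7, true);
    // ...and the Liveness electoral system adds one trailing field.
    let liveness_check_duration: u32 = 10;

    // Appending the new field's encoding to the old raw bytes...
    let mut raw = old_settings.encode();
    raw.extend(liveness_check_duration.encode());

    // ...yields a valid encoding of the widened tuple, which is exactly
    // what the removed migration relied on.
    let migrated = <(u32, bool, u32)>::decode(&mut &raw[..]).unwrap();
    assert_eq!(migrated, (7, true, 10));
}
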
-impl OnRuntimeUpgrade for LivenessSettingsMigration { - fn on_runtime_upgrade() -> Weight { - for key in ElectoralSettings::::iter_keys() { - let mut raw_storage_at_key = unhashed::get_raw(&ElectoralSettings::< - Runtime, - SolanaInstance, - >::hashed_key_for(key)) - .expect("We just got the keys directly from the storage"); - raw_storage_at_key.extend(LIVENESS_CHECK_DURATION.encode()); - ElectoralSettings::::insert(key, <>::ElectoralSystem as ElectoralSystem>::ElectoralSettings::decode(&mut &raw_storage_at_key[..]).unwrap()); - } - - Weight::zero() - } - - #[cfg(feature = "try-runtime")] - fn post_upgrade(_state: Vec) -> Result<(), DispatchError> { - for (.., liveness_duration) in ElectoralSettings::::iter_values() { - assert_eq!(liveness_duration, LIVENESS_CHECK_DURATION); - } - Ok(()) - } -} diff --git a/state-chain/runtime/src/migrations/housekeeping.rs b/state-chain/runtime/src/migrations/housekeeping.rs index 8e8a296285f..2f99e2ae964 100644 --- a/state-chain/runtime/src/migrations/housekeeping.rs +++ b/state-chain/runtime/src/migrations/housekeeping.rs @@ -1,8 +1,6 @@ use crate::Runtime; -use cf_chains::instances::{ArbitrumInstance, EthereumInstance, PolkadotInstance}; use cf_runtime_upgrade_utilities::genesis_hashes; use frame_support::{traits::OnRuntimeUpgrade, weights::Weight}; -use pallet_cf_broadcast::migrations::remove_aborted_broadcasts; #[cfg(feature = "try-runtime")] use sp_runtime::DispatchError; #[cfg(feature = "try-runtime")] @@ -14,25 +12,10 @@ impl OnRuntimeUpgrade for Migration { fn on_runtime_upgrade() -> Weight { match genesis_hashes::genesis_hash::() { genesis_hashes::BERGHAIN => { - log::info!("🧹 Housekeeping, removing stale aborted broadcasts"); - remove_aborted_broadcasts::remove_stale_and_all_older::( - remove_aborted_broadcasts::ETHEREUM_MAX_ABORTED_BROADCAST_BERGHAIN, - ); - remove_aborted_broadcasts::remove_stale_and_all_older::( - remove_aborted_broadcasts::ARBITRUM_MAX_ABORTED_BROADCAST_BERGHAIN, - ); + log::info!("🧹 No housekeeping required for Berghain."); }, genesis_hashes::PERSEVERANCE => { - log::info!("🧹 Housekeeping, removing stale aborted broadcasts"); - remove_aborted_broadcasts::remove_stale_and_all_older::( - remove_aborted_broadcasts::ETHEREUM_MAX_ABORTED_BROADCAST_PERSEVERANCE, - ); - remove_aborted_broadcasts::remove_stale_and_all_older::( - remove_aborted_broadcasts::ARBITRUM_MAX_ABORTED_BROADCAST_PERSEVERANCE, - ); - remove_aborted_broadcasts::remove_stale_and_all_older::( - remove_aborted_broadcasts::POLKADOT_MAX_ABORTED_BROADCAST_PERSEVERANCE, - ); + log::info!("🧹 No housekeeping required for Perseverance."); }, genesis_hashes::SISYPHOS => { log::info!("🧹 No housekeeping required for Sisyphos."); @@ -45,37 +28,6 @@ impl OnRuntimeUpgrade for Migration { #[cfg(feature = "try-runtime")] fn post_upgrade(_state: Vec) -> Result<(), DispatchError> { - match genesis_hashes::genesis_hash::() { - genesis_hashes::BERGHAIN => { - log::info!( - "Housekeeping post_upgrade, checking stale aborted broadcasts are removed." - ); - remove_aborted_broadcasts::assert_removed::( - remove_aborted_broadcasts::ETHEREUM_MAX_ABORTED_BROADCAST_BERGHAIN, - ); - remove_aborted_broadcasts::assert_removed::( - remove_aborted_broadcasts::ARBITRUM_MAX_ABORTED_BROADCAST_BERGHAIN, - ); - }, - genesis_hashes::PERSEVERANCE => { - log::info!( - "Housekeeping post_upgrade, checking stale aborted broadcasts are removed." 
- ); - remove_aborted_broadcasts::assert_removed::( - remove_aborted_broadcasts::ETHEREUM_MAX_ABORTED_BROADCAST_PERSEVERANCE, - ); - remove_aborted_broadcasts::assert_removed::( - remove_aborted_broadcasts::ARBITRUM_MAX_ABORTED_BROADCAST_PERSEVERANCE, - ); - remove_aborted_broadcasts::assert_removed::( - remove_aborted_broadcasts::POLKADOT_MAX_ABORTED_BROADCAST_PERSEVERANCE, - ); - }, - genesis_hashes::SISYPHOS => { - log::info!("Skipping housekeeping post_upgrade for Sisyphos."); - }, - _ => {}, - } Ok(()) } } diff --git a/state-chain/runtime/src/migrations/serialize_solana_broadcast.rs b/state-chain/runtime/src/migrations/serialize_solana_broadcast.rs deleted file mode 100644 index 7c262ce6a69..00000000000 --- a/state-chain/runtime/src/migrations/serialize_solana_broadcast.rs +++ /dev/null @@ -1,91 +0,0 @@ -use frame_support::traits::OnRuntimeUpgrade; -use pallet_cf_broadcast::BroadcastData; - -use crate::*; -use frame_support::pallet_prelude::Weight; -use sp_runtime::DispatchError; - -use cf_chains::sol::{SolTransaction, SolanaTransactionData}; -use codec::{Decode, Encode}; - -pub mod old { - use cf_chains::sol::{SolMessage, SolSignature}; - use cf_primitives::BroadcastId; - use frame_support::{pallet_prelude::OptionQuery, Twox64Concat}; - - use super::*; - - #[derive(PartialEq, Eq, Encode, Decode)] - pub struct SolanaBroadcastData { - pub broadcast_id: BroadcastId, - pub transaction_payload: SolTransaction, - pub threshold_signature_payload: SolMessage, - pub transaction_out_id: SolSignature, - pub nominee: Option<::AccountId>, - } - - #[frame_support::storage_alias] - pub type AwaitingBroadcast = - StorageMap; -} - -pub struct SerializeSolanaBroadcastMigration; - -// Tests for this migration are in: -// state-chain/cf-integration-tests/src/migrations/serialize_solana_broadcast.rs -impl OnRuntimeUpgrade for SerializeSolanaBroadcastMigration { - #[cfg(feature = "try-runtime")] - fn pre_upgrade() -> Result, DispatchError> { - pre_upgrade_check() - } - - fn on_runtime_upgrade() -> Weight { - pallet_cf_broadcast::AwaitingBroadcast::::translate_values::< - old::SolanaBroadcastData, - _, - >(|old_sol_broadcast_data| { - Some(BroadcastData:: { - broadcast_id: old_sol_broadcast_data.broadcast_id, - transaction_payload: SolanaTransactionData { - serialized_transaction: old_sol_broadcast_data - .transaction_payload - .finalize_and_serialize() - .ok()?, - }, - threshold_signature_payload: old_sol_broadcast_data.threshold_signature_payload, - transaction_out_id: old_sol_broadcast_data.transaction_out_id, - nominee: old_sol_broadcast_data.nominee, - }) - }); - - Weight::zero() - } - - #[cfg(feature = "try-runtime")] - fn post_upgrade(state: Vec) -> Result<(), DispatchError> { - post_upgrade_check(state) - } -} - -pub fn pre_upgrade_check() -> Result, DispatchError> { - Ok((old::AwaitingBroadcast::iter().count() as u64).encode()) -} - -pub fn post_upgrade_check(state: Vec) -> Result<(), DispatchError> { - let pre_awaiting_broadcast_count = ::decode(&mut state.as_slice()) - .map_err(|_| DispatchError::from("Failed to decode state"))?; - - let post_awaiting_broadcast_count = - pallet_cf_broadcast::AwaitingBroadcast::::iter().count() as u64; - - assert_eq!(pre_awaiting_broadcast_count, post_awaiting_broadcast_count); - Ok(()) -} - -pub struct NoopUpgrade; - -impl OnRuntimeUpgrade for NoopUpgrade { - fn on_runtime_upgrade() -> Weight { - Weight::zero() - } -}
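The removed pre_upgrade_check / post_upgrade_check above follow a common try-runtime pattern: SCALE-encode a summary of the pre-migration state (here just the AwaitingBroadcast entry count), pass it through as opaque bytes, and compare it after the upgrade has run. A standalone sketch of that pattern with hypothetical function names, using parity-scale-codec as `codec`:

use codec::{Decode, Encode};

// Hypothetical names that only mirror the shape of the removed checks.
fn pre_upgrade(entries_before: u64) -> Vec<u8> {
    entries_before.encode()
}

fn post_upgrade(state: Vec<u8>, entries_after: u64) -> Result<(), &'static str> {
    let entries_before =
        u64::decode(&mut state.as_slice()).map_err(|_| "Failed to decode state")?;
    if entries_before == entries_after {
        Ok(())
    } else {
        Err("AwaitingBroadcast entry count changed across the migration")
    }
}

fn main() {
    // The count recorded before the upgrade must match the count afterwards.
    assert!(post_upgrade(pre_upgrade(3), 3).is_ok());
    assert!(post_upgrade(pre_upgrade(3), 2).is_err());
}
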