diff --git a/Cargo.lock b/Cargo.lock index ce7a552f08..20bc1efbe5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "Inflector" @@ -848,7 +848,16 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ - "bit-vec", + "bit-vec 0.6.3", +] + +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec 0.8.0", ] [[package]] @@ -857,6 +866,12 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + [[package]] name = "bitcoin" version = "0.32.4" @@ -1273,7 +1288,7 @@ dependencies = [ "scale-info", "serde", "sp-core 34.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "utilities", ] @@ -1287,7 +1302,7 @@ dependencies = [ "scale-info", "serde", "sp-core 34.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -1323,6 +1338,7 @@ dependencies = [ "rlp", "scale-info", "serde", + "serde-big-array", "serde_bytes", "serde_json", "serde_with", @@ -1332,7 +1348,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "ss58-registry", "strum 0.26.3", "thiserror", @@ -1404,7 +1420,7 @@ dependencies = [ "sp-offchain", "sp-runtime 39.0.0", "sp-session", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-timestamp", "sp-transaction-pool", "sp-version", @@ -1427,7 +1443,7 @@ dependencies = [ "sp-arithmetic 26.0.0", "sp-core 34.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "strum 0.26.3", "strum_macros 0.26.4", "utilities", @@ -1454,7 +1470,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -1468,7 +1484,7 @@ dependencies = [ "parity-scale-codec", "rand", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 
14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -1501,7 +1517,7 @@ dependencies = [ "serde", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -1515,7 +1531,7 @@ dependencies = [ "hex", "parity-scale-codec", "scale-info", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -3985,7 +4001,7 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "fork-tree" version = "13.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", ] @@ -4018,7 +4034,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frame-benchmarking" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-support", "frame-support-procedural", @@ -4042,7 +4058,7 @@ dependencies = [ [[package]] name = "frame-benchmarking-cli" version = "42.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "Inflector", "array-bytes", @@ -4092,7 +4108,7 @@ dependencies = [ [[package]] name = "frame-executive" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "aquamarine", "frame-support", @@ -4133,7 +4149,7 @@ dependencies = [ [[package]] name = "frame-metadata-hash-extension" version = "0.5.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "docify", @@ -4148,7 +4164,7 @@ dependencies = [ [[package]] name = "frame-support" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "aquamarine", "array-bytes", @@ -4171,7 +4187,7 @@ dependencies = [ "sp-arithmetic 26.0.0", "sp-core 34.0.0", "sp-crypto-hashing-proc-macro", - "sp-debug-derive 14.0.0 
(git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-debug-derive 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-genesis-builder", "sp-inherents", "sp-io 38.0.0", @@ -4179,7 +4195,7 @@ dependencies = [ "sp-runtime 39.0.0", "sp-staking", "sp-state-machine 0.43.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-tracing 17.0.0", "sp-weights 31.0.0", "static_assertions", @@ -4189,7 +4205,7 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "30.0.2" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "Inflector", "cfg-expr", @@ -4201,14 +4217,14 @@ dependencies = [ "proc-macro-warning 1.0.2", "proc-macro2", "quote", - "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "syn 2.0.89", ] [[package]] name = "frame-support-procedural-tools" version = "13.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.2.0", @@ -4220,7 +4236,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools-derive" version = "12.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "proc-macro2", "quote", @@ -4230,7 +4246,7 @@ dependencies = [ [[package]] name = "frame-system" version = "37.1.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "cfg-if", "docify", @@ -4242,7 +4258,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-version", "sp-weights 31.0.0", ] @@ -4250,7 +4266,7 @@ dependencies = [ [[package]] name = "frame-system-benchmarking" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-benchmarking", "frame-support", @@ -4264,7 +4280,7 @@ dependencies = [ [[package]] name = 
"frame-system-rpc-runtime-api" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "docify", "parity-scale-codec", @@ -4274,7 +4290,7 @@ dependencies = [ [[package]] name = "frame-try-runtime" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-support", "parity-scale-codec", @@ -6034,7 +6050,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" dependencies = [ "ascii-canvas", - "bit-set", + "bit-set 0.5.3", "ena", "itertools 0.11.0", "lalrpop-util", @@ -7643,7 +7659,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ - "proc-macro-crate 1.1.3", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", "syn 2.0.89", @@ -7819,7 +7835,7 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "pallet-aura" version = "36.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-support", "frame-system", @@ -7835,7 +7851,7 @@ dependencies = [ [[package]] name = "pallet-authorship" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-support", "frame-system", @@ -7862,7 +7878,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -7884,7 +7900,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "utilities", ] @@ -7908,7 +7924,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -7925,7 +7941,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 
(git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -7946,7 +7962,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -7966,11 +7982,13 @@ dependencies = [ "log", "nanorand", "parity-scale-codec", + "proptest", + "proptest-derive", "rand", "scale-info", "serde", "sp-core 34.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "utilities", ] @@ -7995,7 +8013,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -8018,7 +8036,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "utilities", ] @@ -8040,7 +8058,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -8063,7 +8081,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -8082,7 +8100,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -8107,7 +8125,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "strum 0.26.3", "strum_macros 0.26.4", ] @@ -8131,7 +8149,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -8156,7 +8174,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "utilities", ] @@ -8180,7 +8198,7 @@ dependencies = [ "sp-io 38.0.0", "sp-runtime 39.0.0", "sp-staking", - "sp-std 14.0.0 
(git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -8206,7 +8224,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -8232,7 +8250,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "utilities", ] @@ -8254,7 +8272,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -8282,7 +8300,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "utilities", ] @@ -8306,7 +8324,7 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "utilities", ] @@ -8328,14 +8346,14 @@ dependencies = [ "sp-core 34.0.0", "sp-io 38.0.0", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "utilities", ] [[package]] name = "pallet-grandpa" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-benchmarking", "frame-support", @@ -8357,7 +8375,7 @@ dependencies = [ [[package]] name = "pallet-session" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-support", "frame-system", @@ -8378,7 +8396,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "36.0.1" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "docify", "frame-benchmarking", @@ -8397,7 +8415,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "37.0.0" -source = 
"git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-support", "frame-system", @@ -8412,7 +8430,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc" version = "40.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "jsonrpsee 0.23.2", "pallet-transaction-payment-rpc-runtime-api", @@ -8428,7 +8446,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -8445,7 +8463,7 @@ checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" dependencies = [ "bitcoin_hashes 0.13.0", "rand", - "rand_core 0.5.1", + "rand_core 0.6.4", "serde", "unicode-normalization", ] @@ -8473,9 +8491,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" +checksum = "8be4817d39f3272f69c59fe05d0535ae6456c2dc2fa1ba02910296c7e0a5c590" dependencies = [ "arrayvec 0.7.6", "bitvec", @@ -8483,19 +8501,20 @@ dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec-derive", + "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" +checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" dependencies = [ "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.89", ] [[package]] @@ -9199,10 +9218,12 @@ dependencies = [ [[package]] name = "proptest" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" +checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50" dependencies = [ + "bit-set 0.8.0", + "bit-vec 0.8.0", "bitflags 2.6.0", "lazy_static", "num-traits", @@ -9210,9 +9231,22 @@ dependencies = [ "rand_chacha", "rand_xorshift", "regex-syntax 0.8.5", + "rusty-fork", + "tempfile", "unarray", ] +[[package]] +name = "proptest-derive" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", +] + [[package]] name = "prost" version = "0.11.9" @@ -10227,6 +10261,18 @@ version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +[[package]] +name = "rusty-fork" +version = 
"0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + [[package]] name = "ruzstd" version = "0.5.0" @@ -10285,7 +10331,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "29.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "log", "sp-core 34.0.0", @@ -10296,7 +10342,7 @@ dependencies = [ [[package]] name = "sc-basic-authorship" version = "0.44.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "futures", "futures-timer", @@ -10318,7 +10364,7 @@ dependencies = [ [[package]] name = "sc-block-builder" version = "0.42.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "sp-api", @@ -10333,7 +10379,7 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "docify", @@ -10349,7 +10395,7 @@ dependencies = [ "serde_json", "sp-blockchain", "sp-core 34.0.0", - "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-genesis-builder", "sp-io 38.0.0", "sp-runtime 39.0.0", @@ -10360,7 +10406,7 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "12.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "proc-macro-crate 3.2.0", "proc-macro2", @@ -10371,7 +10417,7 @@ dependencies = [ [[package]] name = "sc-cli" version = "0.46.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "chrono", @@ -10402,7 +10448,7 @@ dependencies = [ "sp-core 34.0.0", "sp-keyring", "sp-keystore 0.40.0", - "sp-panic-handler 13.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-panic-handler 13.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-runtime 39.0.0", 
"sp-version", "thiserror", @@ -10412,7 +10458,7 @@ dependencies = [ [[package]] name = "sc-client-api" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "fnv", "futures", @@ -10439,7 +10485,7 @@ dependencies = [ [[package]] name = "sc-client-db" version = "0.44.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "hash-db 0.16.0", "kvdb", @@ -10465,7 +10511,7 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "futures", @@ -10489,7 +10535,7 @@ dependencies = [ [[package]] name = "sc-consensus-aura" version = "0.44.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "futures", @@ -10518,7 +10564,7 @@ dependencies = [ [[package]] name = "sc-consensus-grandpa" version = "0.29.1" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "ahash 0.8.11", "array-bytes", @@ -10552,7 +10598,7 @@ dependencies = [ "sp-consensus", "sp-consensus-grandpa", "sp-core 34.0.0", - "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-keystore 0.40.0", "sp-runtime 39.0.0", "substrate-prometheus-endpoint", @@ -10562,7 +10608,7 @@ dependencies = [ [[package]] name = "sc-consensus-grandpa-rpc" version = "0.29.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "finality-grandpa", "futures", @@ -10582,7 +10628,7 @@ dependencies = [ [[package]] name = "sc-consensus-slots" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "futures", @@ -10605,7 +10651,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.40.0" -source = 
"git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "parking_lot 0.12.3", @@ -10617,7 +10663,7 @@ dependencies = [ "sp-core 34.0.0", "sp-externalities 0.29.0", "sp-io 38.0.0", - "sp-panic-handler 13.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-panic-handler 13.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-runtime-interface 28.0.0", "sp-trie 37.0.0", "sp-version", @@ -10628,7 +10674,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.35.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "polkavm", "sc-allocator", @@ -10641,7 +10687,7 @@ dependencies = [ [[package]] name = "sc-executor-polkavm" version = "0.32.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "log", "polkavm", @@ -10652,7 +10698,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.35.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "anyhow", "cfg-if", @@ -10670,7 +10716,7 @@ dependencies = [ [[package]] name = "sc-informant" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "ansi_term", "futures", @@ -10687,7 +10733,7 @@ dependencies = [ [[package]] name = "sc-keystore" version = "33.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "parking_lot 0.12.3", @@ -10701,7 +10747,7 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.14.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "arrayvec 0.7.6", @@ -10730,7 +10776,7 @@ dependencies = [ [[package]] name = "sc-network" version = "0.44.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = 
"git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -10781,7 +10827,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "bitflags 1.3.2", @@ -10799,7 +10845,7 @@ dependencies = [ [[package]] name = "sc-network-gossip" version = "0.44.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "ahash 0.8.11", "futures", @@ -10818,7 +10864,7 @@ dependencies = [ [[package]] name = "sc-network-light" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -10839,7 +10885,7 @@ dependencies = [ [[package]] name = "sc-network-sync" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "async-channel 1.9.0", @@ -10876,7 +10922,7 @@ dependencies = [ [[package]] name = "sc-network-transactions" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "futures", @@ -10895,7 +10941,7 @@ dependencies = [ [[package]] name = "sc-network-types" version = "0.12.1" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "bs58 0.5.1", "ed25519-dalek", @@ -10912,7 +10958,7 @@ dependencies = [ [[package]] name = "sc-offchain" version = "39.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "bytes", @@ -10946,7 +10992,7 @@ dependencies = [ [[package]] name = "sc-proposer-metrics" version = "0.18.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" 
dependencies = [ "log", "substrate-prometheus-endpoint", @@ -10955,7 +11001,7 @@ dependencies = [ [[package]] name = "sc-rpc" version = "39.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "futures", "jsonrpsee 0.23.2", @@ -10987,7 +11033,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "jsonrpsee 0.23.2", "parity-scale-codec", @@ -11007,7 +11053,7 @@ dependencies = [ [[package]] name = "sc-rpc-server" version = "16.0.2" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "forwarded-header-value", "futures", @@ -11029,7 +11075,7 @@ dependencies = [ [[package]] name = "sc-rpc-spec-v2" version = "0.44.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "futures", @@ -11061,7 +11107,7 @@ dependencies = [ [[package]] name = "sc-service" version = "0.45.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "directories", @@ -11125,7 +11171,7 @@ dependencies = [ [[package]] name = "sc-state-db" version = "0.36.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "log", "parity-scale-codec", @@ -11136,7 +11182,7 @@ dependencies = [ [[package]] name = "sc-sysinfo" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "derive_more 0.99.18", "futures", @@ -11149,15 +11195,15 @@ dependencies = [ "serde", "serde_json", "sp-core 34.0.0", - "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-io 38.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 
(git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] name = "sc-telemetry" version = "24.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "chrono", "futures", @@ -11177,7 +11223,7 @@ dependencies = [ [[package]] name = "sc-tracing" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "ansi_term", "chrono", @@ -11207,7 +11253,7 @@ dependencies = [ [[package]] name = "sc-tracing-proc-macro" version = "11.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "proc-macro-crate 3.2.0", "proc-macro2", @@ -11218,7 +11264,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "futures", @@ -11234,7 +11280,7 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-core 34.0.0", - "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-runtime 39.0.0", "sp-tracing 17.0.0", "sp-transaction-pool", @@ -11245,7 +11291,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "futures", @@ -11261,7 +11307,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "17.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-channel 1.9.0", "futures", @@ -11683,6 +11729,15 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-big-array" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11fc7cc2c76d73e0f27ee52abbd64eec84d46f370c88371120433196934e4b7f" +dependencies = [ + "serde", +] + [[package]] name = "serde_bytes" version = "0.11.15" @@ -12205,7 +12260,7 @@ dependencies = [ [[package]] name = "sp-api" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" 
+source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "docify", "hash-db 0.16.0", @@ -12227,7 +12282,7 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "20.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "Inflector", "blake2 0.10.6", @@ -12255,7 +12310,7 @@ dependencies = [ [[package]] name = "sp-application-crypto" version = "38.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "scale-info", @@ -12282,7 +12337,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "26.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "docify", "integer-sqrt", @@ -12296,7 +12351,7 @@ dependencies = [ [[package]] name = "sp-block-builder" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "sp-api", "sp-inherents", @@ -12306,7 +12361,7 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "37.0.1" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "futures", "parity-scale-codec", @@ -12325,7 +12380,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.40.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "futures", @@ -12340,7 +12395,7 @@ dependencies = [ [[package]] name = "sp-consensus-aura" version = "0.40.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "parity-scale-codec", @@ -12356,7 +12411,7 @@ dependencies = [ [[package]] name = "sp-consensus-grandpa" version = "21.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" 
dependencies = [ "finality-grandpa", "log", @@ -12373,7 +12428,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.40.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "scale-info", @@ -12431,7 +12486,7 @@ dependencies = [ [[package]] name = "sp-core" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "bitflags 1.3.2", @@ -12460,11 +12515,11 @@ dependencies = [ "secp256k1 0.28.2", "secrecy", "serde", - "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", - "sp-debug-derive 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", + "sp-debug-derive 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-externalities 0.29.0", "sp-runtime-interface 28.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-storage 21.0.0", "ss58-registry", "substrate-bip39 0.6.0", @@ -12491,7 +12546,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing" version = "0.1.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "blake2b_simd", "byteorder", @@ -12504,17 +12559,17 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" version = "0.1.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "quote", - "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "syn 2.0.89", ] [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "kvdb", "parking_lot 0.12.3", @@ -12534,7 +12589,7 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = 
"git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "proc-macro2", "quote", @@ -12556,7 +12611,7 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.29.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "environmental", "parity-scale-codec", @@ -12566,7 +12621,7 @@ dependencies = [ [[package]] name = "sp-genesis-builder" version = "0.15.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "scale-info", @@ -12578,7 +12633,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -12618,7 +12673,7 @@ dependencies = [ [[package]] name = "sp-io" version = "38.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "bytes", "docify", @@ -12630,7 +12685,7 @@ dependencies = [ "rustversion", "secp256k1 0.28.2", "sp-core 34.0.0", - "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-externalities 0.29.0", "sp-keystore 0.40.0", "sp-runtime-interface 28.0.0", @@ -12644,7 +12699,7 @@ dependencies = [ [[package]] name = "sp-keyring" version = "39.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "sp-core 34.0.0", "sp-runtime 39.0.0", @@ -12666,7 +12721,7 @@ dependencies = [ [[package]] name = "sp-keystore" version = "0.40.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "parking_lot 0.12.3", @@ -12677,7 +12732,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ 
"thiserror", "zstd 0.12.4", @@ -12686,7 +12741,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.7.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "frame-metadata 16.0.0", "parity-scale-codec", @@ -12696,7 +12751,7 @@ dependencies = [ [[package]] name = "sp-mixnet" version = "0.12.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "scale-info", @@ -12707,7 +12762,7 @@ dependencies = [ [[package]] name = "sp-offchain" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "sp-api", "sp-core 34.0.0", @@ -12728,7 +12783,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "backtrace", "lazy_static", @@ -12738,7 +12793,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "32.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "rustc-hash", "serde", @@ -12773,7 +12828,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "39.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "docify", "either", @@ -12791,7 +12846,7 @@ dependencies = [ "sp-arithmetic 26.0.0", "sp-core 34.0.0", "sp-io 38.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-weights 31.0.0", "tracing", ] @@ -12819,7 +12874,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "28.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -12827,8 +12882,8 @@ dependencies = [ "polkavm-derive 0.9.1", "primitive-types", "sp-externalities 0.29.0", - "sp-runtime-interface-proc-macro 18.0.0 
(git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-runtime-interface-proc-macro 18.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-storage 21.0.0", "sp-tracing 17.0.0", "sp-wasm-interface 21.0.0", @@ -12852,7 +12907,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "18.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "Inflector", "expander", @@ -12865,7 +12920,7 @@ dependencies = [ [[package]] name = "sp-session" version = "35.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "scale-info", @@ -12879,7 +12934,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -12914,7 +12969,7 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.43.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "hash-db 0.16.0", "log", @@ -12924,7 +12979,7 @@ dependencies = [ "smallvec", "sp-core 34.0.0", "sp-externalities 0.29.0", - "sp-panic-handler 13.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-panic-handler 13.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-trie 37.0.0", "thiserror", "tracing", @@ -12934,7 +12989,7 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "18.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "aes-gcm", "curve25519-dalek 4.1.3", @@ -12947,7 +13002,7 @@ dependencies = [ "sp-api", "sp-application-crypto 38.0.0", "sp-core 34.0.0", - "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-crypto-hashing 0.1.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-externalities 0.29.0", "sp-runtime 39.0.0", "sp-runtime-interface 28.0.0", @@ -12964,7 +13019,7 @@ checksum = "12f8ee986414b0a9ad741776762f4083cd3a5128449b982a3919c4df36874834" 
[[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" [[package]] name = "sp-storage" @@ -12983,19 +13038,19 @@ dependencies = [ [[package]] name = "sp-storage" version = "21.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "impl-serde", "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-debug-derive 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] name = "sp-timestamp" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "parity-scale-codec", @@ -13020,7 +13075,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "17.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "tracing", @@ -13031,7 +13086,7 @@ dependencies = [ [[package]] name = "sp-transaction-pool" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "sp-api", "sp-runtime 39.0.0", @@ -13040,7 +13095,7 @@ dependencies = [ [[package]] name = "sp-transaction-storage-proof" version = "34.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "async-trait", "parity-scale-codec", @@ -13079,7 +13134,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "ahash 0.8.11", "hash-db 0.16.0", @@ -13102,7 +13157,7 @@ dependencies = [ [[package]] name = "sp-version" version = "37.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "impl-serde", 
"parity-scale-codec", @@ -13111,7 +13166,7 @@ dependencies = [ "serde", "sp-crypto-hashing-proc-macro", "sp-runtime 39.0.0", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-version-proc-macro", "thiserror", ] @@ -13119,7 +13174,7 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "14.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "parity-scale-codec", "proc-macro2", @@ -13144,7 +13199,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "21.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -13172,7 +13227,7 @@ dependencies = [ [[package]] name = "sp-weights" version = "31.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "bounded-collections", "parity-scale-codec", @@ -13180,7 +13235,7 @@ dependencies = [ "serde", "smallvec", "sp-arithmetic 26.0.0", - "sp-debug-derive 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-debug-derive 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", ] [[package]] @@ -13303,7 +13358,7 @@ dependencies = [ "sp-offchain", "sp-runtime 39.0.0", "sp-session", - "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2)", + "sp-std 14.0.0 (git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2)", "sp-transaction-pool", "sp-version", "substrate-wasm-builder", @@ -13452,7 +13507,7 @@ dependencies = [ [[package]] name = "substrate-bip39" version = "0.6.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "hmac 0.12.1", "pbkdf2 0.12.2", @@ -13464,12 +13519,12 @@ dependencies = [ [[package]] name = "substrate-build-script-utils" version = "11.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" [[package]] name = "substrate-frame-rpc-system" version = "38.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" 
dependencies = [ "docify", "frame-system-rpc-runtime-api", @@ -13489,7 +13544,7 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "http-body-util", "hyper 1.5.1", @@ -13503,7 +13558,7 @@ dependencies = [ [[package]] name = "substrate-wasm-builder" version = "24.0.0" -source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2+2#d4792faaa7ab3fbb9798dcc629564d182853690e" +source = "git+https://github.com/chainflip-io/polkadot-sdk.git?tag=chainflip-substrate-1.15.2%2B2#d4792faaa7ab3fbb9798dcc629564d182853690e" dependencies = [ "array-bytes", "build-helper", @@ -15957,7 +16012,7 @@ dependencies = [ [[package]] name = "zmq" version = "0.9.2" -source = "git+https://github.com/chainflip-io/rust-zmq.git?tag=chainflip-v0.9.2+1#3fb5e132ddd7c3ed40da8a93c3a2fdeef2853e69" +source = "git+https://github.com/chainflip-io/rust-zmq.git?tag=chainflip-v0.9.2%2B1#3fb5e132ddd7c3ed40da8a93c3a2fdeef2853e69" dependencies = [ "bitflags 1.3.2", "libc", @@ -15968,7 +16023,7 @@ dependencies = [ [[package]] name = "zmq-sys" version = "0.11.0" -source = "git+https://github.com/chainflip-io/rust-zmq.git?tag=chainflip-v0.9.2+1#3fb5e132ddd7c3ed40da8a93c3a2fdeef2853e69" +source = "git+https://github.com/chainflip-io/rust-zmq.git?tag=chainflip-v0.9.2%2B1#3fb5e132ddd7c3ed40da8a93c3a2fdeef2853e69" dependencies = [ "libc", "metadeps", diff --git a/Cargo.toml b/Cargo.toml index 23d510d46f..dd032cefed 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -129,6 +129,7 @@ reqwest = { version = "0.11.4" } rlp = { version = "0.5.2", default-features = false } rocksdb = { version = "0.21.0" } serde_bytes = { version = "0.11.14", default-features = false } +serde-big-array = "0.5.1" serde_path_to_error = "*" serde_with = { version = "3.11.0", default-features = false } scale-decode = { version = "0.13" } diff --git a/api/bin/chainflip-ingress-egress-tracker/src/witnessing/btc.rs b/api/bin/chainflip-ingress-egress-tracker/src/witnessing/btc.rs index 2e8070c508..599565ac1f 100644 --- a/api/bin/chainflip-ingress-egress-tracker/src/witnessing/btc.rs +++ b/api/bin/chainflip-ingress-egress-tracker/src/witnessing/btc.rs @@ -52,7 +52,7 @@ where move |header| { let btc_client = btc_client.clone(); async move { - let block = btc_client.block(header.hash).await; + let block = btc_client.block(header.hash).await.expect("TODO: Delete this"); (header.data, block.txdata) } } diff --git a/api/bin/chainflip-ingress-egress-tracker/src/witnessing/state_chain.rs b/api/bin/chainflip-ingress-egress-tracker/src/witnessing/state_chain.rs index fafbfff21b..51c0d4b7b4 100644 --- a/api/bin/chainflip-ingress-egress-tracker/src/witnessing/state_chain.rs +++ b/api/bin/chainflip-ingress-egress-tracker/src/witnessing/state_chain.rs @@ -370,7 +370,8 @@ where Swapping(_) | LiquidityProvider(_) | LiquidityPools(_) | - SolanaElections(_) => {}, + SolanaElections(_) | + BitcoinElections(_) => {}, }; Ok(()) diff --git a/engine/src/btc/retry_rpc.rs b/engine/src/btc/retry_rpc.rs index fd15a6fa7d..1286954bc5 100644 --- a/engine/src/btc/retry_rpc.rs +++ b/engine/src/btc/retry_rpc.rs @@ -51,7 +51,7 @@ impl BtcRetryRpcClient { #[async_trait::async_trait] pub trait BtcRetryRpcApi: Clone { - async fn block(&self, block_hash: 
BlockHash) -> VerboseBlock; + async fn block(&self, block_hash: BlockHash) -> anyhow::Result; async fn block_hash(&self, block_number: cf_chains::btc::BlockNumber) -> BlockHash; @@ -61,19 +61,25 @@ pub trait BtcRetryRpcApi: Clone { async fn average_block_fee_rate(&self, block_hash: BlockHash) -> cf_chains::btc::BtcAmount; - async fn best_block_header(&self) -> BlockHeader; + async fn best_block_header(&self) -> anyhow::Result; + + async fn block_header( + &self, + block_number: cf_chains::btc::BlockNumber, + ) -> anyhow::Result; } #[async_trait::async_trait] impl BtcRetryRpcApi for BtcRetryRpcClient { - async fn block(&self, block_hash: BlockHash) -> VerboseBlock { + async fn block(&self, block_hash: BlockHash) -> anyhow::Result { self.retry_client - .request( + .request_with_limit( RequestLog::new("block".to_string(), Some(format!("{block_hash}"))), Box::pin(move |client| { #[allow(clippy::redundant_async_block)] Box::pin(async move { client.block(block_hash).await }) }), + 2, ) .await } @@ -134,9 +140,9 @@ impl BtcRetryRpcApi for BtcRetryRpcClient { .await } - async fn best_block_header(&self) -> BlockHeader { + async fn best_block_header(&self) -> anyhow::Result { self.retry_client - .request( + .request_with_limit( RequestLog::new("best_block_header".to_string(), None), Box::pin(move |client| { #[allow(clippy::redundant_async_block)] @@ -147,6 +153,28 @@ impl BtcRetryRpcApi for BtcRetryRpcClient { Ok(header) }) }), + 2, + ) + .await + } + + async fn block_header( + &self, + block_number: cf_chains::btc::BlockNumber, + ) -> anyhow::Result { + self.retry_client + .request_with_limit( + RequestLog::new("block_header".to_string(), Some(block_number.to_string())), + Box::pin(move |client| { + #[allow(clippy::redundant_async_block)] + Box::pin(async move { + let block_hash = client.block_hash(block_number).await?; + let header = client.block_header(block_hash).await?; + assert_eq!(header.hash, block_hash); + Ok(header) + }) + }), + 2, ) .await } @@ -200,7 +228,7 @@ pub mod mocks { #[async_trait::async_trait] impl BtcRetryRpcApi for BtcRetryRpcClient { - async fn block(&self, block_hash: BlockHash) -> VerboseBlock; + async fn block(&self, block_hash: BlockHash) -> anyhow::Result; async fn block_hash(&self, block_number: cf_chains::btc::BlockNumber) -> BlockHash; @@ -210,7 +238,12 @@ pub mod mocks { async fn average_block_fee_rate(&self, block_hash: BlockHash) -> cf_chains::btc::BtcAmount; - async fn best_block_header(&self) -> BlockHeader; + async fn best_block_header(&self) -> anyhow::Result; + + async fn block_header( + &self, + block_number: cf_chains::btc::BlockNumber, + ) -> anyhow::Result; } } } diff --git a/engine/src/elections.rs b/engine/src/elections.rs index 8d7eea006d..623944a2e3 100644 --- a/engine/src/elections.rs +++ b/engine/src/elections.rs @@ -9,6 +9,7 @@ use crate::{ }, }; use anyhow::anyhow; +use cf_chains::instances::ChainInstanceAlias; use cf_primitives::MILLISECONDS_PER_BLOCK; use cf_utilities::{future_map::FutureMap, task_scope::Scope, UnendingStream}; use futures::{stream, StreamExt, TryStreamExt}; @@ -31,28 +32,30 @@ const MAXIMUM_SHARED_DATA_CACHE_ITEMS: usize = 1024; const MAXIMUM_CONCURRENT_VOTER_REQUESTS: u32 = 32; const INITIAL_VOTER_REQUEST_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10); +pub type ChainInstance = ::Instance; + pub struct Voter< - Instance: 'static, - StateChainClient: ElectoralApi + SignedExtrinsicApi + ChainApi, - VoterClient: CompositeVoterApi<>::ElectoralSystemRunner> + Send + Sync + 'static, + Chain: cf_chains::Chain + 
'static, + StateChainClient: ElectoralApi> + SignedExtrinsicApi + ChainApi, + VoterClient: CompositeVoterApi<>>::ElectoralSystemRunner> + Send + Sync + 'static, > where state_chain_runtime::Runtime: - pallet_cf_elections::Config, + pallet_cf_elections::Config>, { state_chain_client: Arc, voter: RetrierClient, - _phantom: core::marker::PhantomData, + _phantom: core::marker::PhantomData, } impl< - Instance: Send + Sync + 'static, - StateChainClient: ElectoralApi + SignedExtrinsicApi + ChainApi, - VoterClient: CompositeVoterApi<>::ElectoralSystemRunner> + Clone + Send + Sync + 'static, - > Voter + Chain: cf_chains::Chain + 'static, + StateChainClient: ElectoralApi> + SignedExtrinsicApi + ChainApi, + VoterClient: CompositeVoterApi<>>::ElectoralSystemRunner> + Clone + Send + Sync + 'static, + > Voter where state_chain_runtime::Runtime: - pallet_cf_elections::Config, - pallet_cf_elections::Call: + pallet_cf_elections::Config>, + pallet_cf_elections::Call>: std::convert::Into, { pub fn new( @@ -76,9 +79,9 @@ where pub async fn continuously_vote(self) { loop { - info!("Beginning voting"); + info!("{}: Beginning voting", Chain::NAME); if let Err(error) = self.reset_and_continuously_vote().await { - error!("Voting reset due to error: '{}'", error); + error!("{}: Voting reset due to error: '{error}'", Chain::NAME); } } } @@ -87,14 +90,16 @@ where let mut rng = rand::rngs::OsRng; let latest_unfinalized_block = self.state_chain_client.latest_unfinalized_block(); if let Some(_electoral_data) = self.state_chain_client.electoral_data(latest_unfinalized_block).await { - let (_, _, block_header, _) = self.state_chain_client.submit_signed_extrinsic(pallet_cf_elections::Call::::ignore_my_votes {}).await.until_in_block().await?; + tracing::info!("{}: Got some electoral data", Chain::NAME); + let (_, _, block_header, _) = self.state_chain_client.submit_signed_extrinsic(pallet_cf_elections::Call::>::ignore_my_votes {}).await.until_in_block().await?; if let Some(electoral_data) = self.state_chain_client.electoral_data(block_header.into()).await { + tracing::info!("{}: Got some electoral data 2", Chain::NAME); stream::iter(electoral_data.current_elections).map(|(election_identifier, election_data)| { let state_chain_client = &self.state_chain_client; async move { if election_data.option_existing_vote.is_some() { - state_chain_client.finalize_signed_extrinsic(pallet_cf_elections::Call::::delete_vote { + state_chain_client.finalize_signed_extrinsic(pallet_cf_elections::Call::>::delete_vote { election_identifier, }).await.until_in_block().await?; } @@ -102,26 +107,29 @@ where } }).buffer_unordered(32).try_collect::>().await?; - self.state_chain_client.submit_signed_extrinsic(pallet_cf_elections::Call::::stop_ignoring_my_votes {}).await.until_in_block().await?; + tracing::info!("{}: Submitting signed extrinsic", Chain::NAME); + self.state_chain_client.submit_signed_extrinsic(pallet_cf_elections::Call::>::stop_ignoring_my_votes {}).await.until_in_block().await?; + tracing::info!("{}: Submitted signed extrinsic", Chain::NAME); } } let mut unfinalized_block_stream = self.state_chain_client.unfinalized_block_stream().await; + // TEMP: Half block time to hack BTC voting. 
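+ // With the halved constant below, `submit_interval` ticks roughly twice per state-chain block, so queued votes are flushed more often while BTC elections are brought up.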
const BLOCK_TIME: std::time::Duration = - std::time::Duration::from_millis(MILLISECONDS_PER_BLOCK); + std::time::Duration::from_millis(MILLISECONDS_PER_BLOCK / 2); let mut submit_interval = tokio::time::interval(BLOCK_TIME); let mut pending_submissions = BTreeMap::< - CompositeElectionIdentifierOf<>::ElectoralSystemRunner>, + CompositeElectionIdentifierOf<>>::ElectoralSystemRunner>, ( - <<>::ElectoralSystemRunner as ElectoralSystemRunner>::Vote as VoteStorage>::PartialVote, - <<>::ElectoralSystemRunner as ElectoralSystemRunner>::Vote as VoteStorage>::Vote, + <<>>::ElectoralSystemRunner as ElectoralSystemRunner>::Vote as VoteStorage>::PartialVote, + <<>>::ElectoralSystemRunner as ElectoralSystemRunner>::Vote as VoteStorage>::Vote, ) >::default(); let mut vote_tasks = FutureMap::default(); let mut shared_data_cache = HashMap::< SharedDataHash, ( - <<>::ElectoralSystemRunner as ElectoralSystemRunner>::Vote as VoteStorage>::SharedData, + <<>>::ElectoralSystemRunner as ElectoralSystemRunner>::Vote as VoteStorage>::SharedData, std::time::Instant, ) >::default(); @@ -149,10 +157,10 @@ where let state_chain_client = &self.state_chain_client; async move { for (election_identifier, _) in votes.iter() { - info!("Submitting vote for election: '{:?}'", election_identifier); + info!("{}: Submitting vote for election: '{election_identifier:?}'", Chain::NAME); } // TODO: Use block hash you got this vote tasks details from as the based of the mortal of the extrinsic - state_chain_client.submit_signed_extrinsic(pallet_cf_elections::Call::::vote { + state_chain_client.submit_signed_extrinsic(pallet_cf_elections::Call::>::vote { authority_votes: BTreeMap::from_iter(votes).try_into().unwrap(/*Safe due to chunking*/), }).await; } @@ -161,9 +169,9 @@ where let (election_identifier, result_vote) = vote_tasks.next_or_pending() => { match result_vote { Ok(vote) => { - info!("Voting task for election: '{:?}' succeeded.", election_identifier); + info!("{}: Voting task for election: '{election_identifier:?}' succeeded.", Chain::NAME); // Create the partial_vote early so that SharedData can be provided as soon as the vote has been generated, rather than only after it is submitted. 
- let partial_vote = <<>::ElectoralSystemRunner as ElectoralSystemRunner>::Vote as VoteStorage>::vote_into_partial_vote(&vote, |shared_data| { + let partial_vote = <<>>::ElectoralSystemRunner as ElectoralSystemRunner>::Vote as VoteStorage>::vote_into_partial_vote(&vote, |shared_data| { let shared_data_hash = SharedDataHash::of(&shared_data); if shared_data_cache.len() > MAXIMUM_SHARED_DATA_CACHE_ITEMS { for shared_data_hash in shared_data_cache.keys().cloned().take(shared_data_cache.len() - MAXIMUM_SHARED_DATA_CACHE_ITEMS).collect::>() { @@ -177,7 +185,7 @@ where pending_submissions.insert(election_identifier, (partial_vote, vote)); }, Err(error) => { - warn!("Voting task for election '{:?}' failed with error: '{:?}'.", election_identifier, error); + warn!("{}: Voting task for election '{election_identifier:?}' failed with error: '{error:?}'.", Chain::NAME); } } }, @@ -190,16 +198,18 @@ where added_to_cache.elapsed() < LIFETIME_OF_SHARED_DATA_IN_CACHE }); + tracing::info!("{}: Unfinalised next, getting electoral_data", Chain::NAME); if let Some(electoral_data) = self.state_chain_client.electoral_data(block_info).await { + tracing::info!("{}: Unfinalised next, got some electoral_data: {:?}", Chain::NAME, electoral_data); if electoral_data.contributing { for (election_identifier, election_data) in electoral_data.current_elections { if election_data.is_vote_desired { if !vote_tasks.contains_key(&election_identifier) { - info!("Voting task for election: '{:?}' initiate.", election_identifier); + info!("{}: Voting task for election: '{election_identifier:?}' initiate.", Chain::NAME); vote_tasks.insert( election_identifier, Box::pin(self.voter.request_with_limit( - RequestLog::new("vote".to_string(), Some(format!("{election_identifier:?}"))), // Add some identifier for `Instance`. + RequestLog::new("vote".to_string(), Some(format!("{}: {election_identifier:?}", Chain::NAME))), Box::pin(move |client| { let election_data = election_data.clone(); #[allow(clippy::redundant_async_block)] @@ -214,7 +224,7 @@ where )) ); } else { - info!("Voting task for election: '{:?}' not initiated as a task is already running for that election.", election_identifier); + info!("{}: Voting task for election: '{election_identifier:?}' not initiated as a task is already running for that election.", Chain::NAME); } } } @@ -232,7 +242,7 @@ where let final_probability = 1.0 / (core::cmp::max(1, core::cmp::min(reference_details.count, electoral_data.authority_count)) as f64); if rng.gen_bool((1.0 - lerp_factor) * initial_probability + lerp_factor * final_probability) { - self.state_chain_client.submit_signed_extrinsic(pallet_cf_elections::Call::::provide_shared_data { + self.state_chain_client.submit_signed_extrinsic(pallet_cf_elections::Call::>::provide_shared_data { shared_data: shared_data.clone(), }).await; } @@ -242,10 +252,10 @@ where } else { // We expect this to happen when a validator joins the set, since they won't be contributing, but will be a validator. // Therefore they get Some() from `electoral_data` but `contributing` is false, until we reset the voting by throwing an error here. 
- return Err(anyhow!("Validator has just joined the authority set, or has been unexpectedly set as not contributing.")); + return Err(anyhow!("{}: Validator has just joined the authority set, or has been unexpectedly set as not contributing.", Chain::NAME)); } } else { - info!("Not voting as not an authority."); + info!("{}: Not voting as not an authority.", Chain::NAME); } } else break Ok(()), } diff --git a/engine/src/elections/voter_api.rs b/engine/src/elections/voter_api.rs index 3d1c30a36d..5ff06a014f 100644 --- a/engine/src/elections/voter_api.rs +++ b/engine/src/elections/voter_api.rs @@ -78,4 +78,6 @@ macro_rules! generate_voter_api_tuple_impls { } } +generate_voter_api_tuple_impls!(tuple_1_impls: ((A, A0))); +generate_voter_api_tuple_impls!(tuple_2_impls: ((A, A0), (B, B0))); generate_voter_api_tuple_impls!(tuple_7_impls: ((A, A0), (B, B0), (C, C0), (D, D0), (EE, E0), (FF, F0), (GG, G0))); diff --git a/engine/src/state_chain_observer/client/electoral_api.rs b/engine/src/state_chain_observer/client/electoral_api.rs index 61460b7766..60fc9641d6 100644 --- a/engine/src/state_chain_observer/client/electoral_api.rs +++ b/engine/src/state_chain_observer/client/electoral_api.rs @@ -7,11 +7,11 @@ use pallet_cf_elections::{ electoral_system_runner::CompositeElectionIdentifierOf, vote_storage::VoteStorage, ElectoralDataFor, ElectoralSystemRunner, }; -use state_chain_runtime::SolanaInstance; +use state_chain_runtime::{BitcoinInstance, SolanaInstance}; use std::collections::{BTreeMap, BTreeSet}; use tracing::error; -pub trait ElectoralApi +pub trait ElectoralApi where state_chain_runtime::Runtime: pallet_cf_elections::Config, { @@ -37,7 +37,7 @@ where impl< RawRpcClient: RawRpcApi + Send + Sync + 'static, SignedExtrinsicClient: SignedExtrinsicApi + Send + Sync + 'static, - > ElectoralApi + > ElectoralApi for StateChainClient> { fn electoral_data( @@ -67,10 +67,14 @@ impl< fn filter_votes( &self, proposed_votes: BTreeMap< - CompositeElectionIdentifierOf<>::ElectoralSystemRunner>, - <<>::ElectoralSystemRunner as ElectoralSystemRunner>::Vote as VoteStorage>::Vote, - >, - ) -> impl std::future::Future>::ElectoralSystemRunner>>> + Send + 'static{ + CompositeElectionIdentifierOf<>::ElectoralSystemRunner>, + <<>::ElectoralSystemRunner as +ElectoralSystemRunner>::Vote as VoteStorage>::Vote, >, + ) -> impl std::future::Future>::ElectoralSystemRunner>>> + Send + 'static{ let base_rpc_client = self.base_rpc_client.clone(); let account_id = self.signed_extrinsic_client.account_id(); async move { @@ -96,3 +100,73 @@ impl< } } } + +impl< + RawRpcClient: RawRpcApi + Send + Sync + 'static, + SignedExtrinsicClient: SignedExtrinsicApi + Send + Sync + 'static, + > ElectoralApi + for StateChainClient> +{ + fn electoral_data( + &self, + block: BlockInfo, + ) -> impl std::future::Future< + Output = Option>, + > + Send + + 'static { + let base_rpc_client = self.base_rpc_client.clone(); + let account_id = self.signed_extrinsic_client.account_id(); + async move { + base_rpc_client + .raw_rpc_client + .cf_bitcoin_electoral_data(account_id, Some(block.hash)) + .await + .map_err(anyhow::Error::from) + .and_then(|electoral_data| { + > as Decode>::decode(&mut &electoral_data[..]).map_err(Into::into) + }) + .inspect_err(|error| { + error!("Failure in electoral_data rpc: '{}'", error); + }) + .ok() + .flatten() + } + } + + fn filter_votes( + &self, + proposed_votes: BTreeMap< + CompositeElectionIdentifierOf<>::ElectoralSystemRunner>, + <<>::ElectoralSystemRunner as +ElectoralSystemRunner>::Vote as VoteStorage>::Vote, >, + ) 
-> impl std::future::Future>::ElectoralSystemRunner>>> + Send + 'static{ + let base_rpc_client = self.base_rpc_client.clone(); + let account_id = self.signed_extrinsic_client.account_id(); + async move { + base_rpc_client + .raw_rpc_client + .cf_bitcoin_filter_votes(account_id, proposed_votes.encode(), None) + .await + .map_err(anyhow::Error::from) + .and_then(|electoral_data| { + >::ElectoralSystemRunner, + >, + > as Decode>::decode(&mut &electoral_data[..]) + .map_err(Into::into) + }) + .inspect_err(|error| { + error!("Failure in filter_votes rpc: '{}'", error); + }) + .unwrap_or_default() + } + } +} diff --git a/engine/src/witness.rs b/engine/src/witness.rs index 52d2eef949..ece7e24244 100644 --- a/engine/src/witness.rs +++ b/engine/src/witness.rs @@ -1,5 +1,6 @@ pub mod arb; pub mod btc; +pub mod btc_e; pub mod common; pub mod dot; pub mod eth; diff --git a/engine/src/witness/btc.rs b/engine/src/witness/btc.rs index 330f5cddb0..5c41cd8c10 100644 --- a/engine/src/witness/btc.rs +++ b/engine/src/witness/btc.rs @@ -1,5 +1,5 @@ mod chain_tracking; -mod deposits; +pub mod deposits; pub mod source; pub mod vault_swaps; @@ -115,7 +115,7 @@ where move |header| { let btc_client = btc_client.clone(); async move { - let block = btc_client.block(header.hash).await; + let block = btc_client.block(header.hash).await.expect("TODO: Delete this"); (header.data, block.txdata) } } diff --git a/engine/src/witness/btc/deposits.rs b/engine/src/witness/btc/deposits.rs index a49f8038e6..cde7cf44e6 100644 --- a/engine/src/witness/btc/deposits.rs +++ b/engine/src/witness/btc/deposits.rs @@ -128,7 +128,7 @@ impl ChunkedByVaultBuilder { } } -fn deposit_witnesses( +pub fn deposit_witnesses( txs: &[VerboseTransaction], deposit_addresses: &HashMap, DepositAddress>, ) -> Vec> { @@ -167,7 +167,7 @@ fn deposit_witnesses( .collect() } -fn map_script_addresses( +pub fn map_script_addresses( deposit_channels: Vec>, ) -> HashMap, DepositAddress> { deposit_channels diff --git a/engine/src/witness/btc/source.rs b/engine/src/witness/btc/source.rs index 890bdfbc9e..0989fbab96 100644 --- a/engine/src/witness/btc/source.rs +++ b/engine/src/witness/btc/source.rs @@ -45,7 +45,10 @@ where loop { tick.tick().await; - let best_block_header = client.best_block_header().await; + let best_block_header = client + .best_block_header() + .await + .expect("TODO: This whole source will be deleted "); if last_block_hash_yielded != Some(best_block_header.hash) { return Some(( Header { diff --git a/engine/src/witness/btc_e.rs b/engine/src/witness/btc_e.rs new file mode 100644 index 0000000000..1162ae8d13 --- /dev/null +++ b/engine/src/witness/btc_e.rs @@ -0,0 +1,204 @@ +//! 
For BTC Elections + +use bitcoin::hashes::Hash; +use cf_chains::{ + btc::{self, BlockNumber}, + witness_period::BlockWitnessRange, + Bitcoin, +}; +use cf_utilities::task_scope::{self, Scope}; +use futures::FutureExt; +use pallet_cf_elections::{ + electoral_system::ElectoralSystem, + electoral_systems::block_height_tracking::{ + primitives::Header, BlockHeightTrackingProperties, InputHeaders, + }, + vote_storage::VoteStorage, +}; +use sp_core::bounded::alloc::collections::VecDeque; +use state_chain_runtime::{ + chainflip::bitcoin_elections::{ + BitcoinBlockHeightTracking, BitcoinDepositChannelWitnessing, BitcoinElectoralSystemRunner, + }, + BitcoinInstance, +}; + +use crate::{ + btc::{retry_rpc::BtcRetryRpcApi, rpc::BlockHeader}, + elections::voter_api::{CompositeVoter, VoterApi}, + retrier::RetryLimit, + state_chain_observer::client::{ + chain_api::ChainApi, electoral_api::ElectoralApi, + extrinsic_api::signed::SignedExtrinsicApi, storage_api::StorageApi, + }, + witness::btc::deposits::{deposit_witnesses, map_script_addresses}, +}; +use anyhow::{anyhow, Result}; + +use std::sync::Arc; + +use crate::btc::retry_rpc::BtcRetryRpcClient; + +#[derive(Clone)] +pub struct BitcoinDepositChannelWitnessingVoter { + client: BtcRetryRpcClient, +} + +#[async_trait::async_trait] +impl VoterApi for BitcoinDepositChannelWitnessingVoter { + async fn vote( + &self, + _settings: ::ElectoralSettings, + deposit_addresses: ::ElectionProperties, + ) -> Result< + <::Vote as VoteStorage>::Vote, + anyhow::Error, + > { + let (witness_range, deposit_addresses) = deposit_addresses; + tracing::info!("Deposit channel witnessing properties: {:?}", deposit_addresses); + + let mut txs = vec![]; + // we only ever expect this to be one for bitcoin, but for completeness, we loop. + tracing::info!("Witness range: {:?}", witness_range); + for block in BlockWitnessRange::::into_range_inclusive(witness_range) { + tracing::info!("Checking block {:?}", block); + + // TODO: these queries should not be infinite + let block_hash = self.client.block_hash(block).await; + + let block = self.client.block(block_hash).await?; + + txs.extend(block.txdata); + } + + let deposit_addresses = map_script_addresses(deposit_addresses); + + let witnesses = deposit_witnesses(&txs, &deposit_addresses); + + if witnesses.is_empty() { + tracing::info!("No witnesses found for BTCE"); + } else { + tracing::info!("Witnesses from BTCE: {:?}", witnesses); + } + + Ok(witnesses) + } +} + +#[derive(Clone)] +pub struct BitcoinBlockHeightTrackingVoter { + client: BtcRetryRpcClient, +} + +#[async_trait::async_trait] +impl VoterApi for BitcoinBlockHeightTrackingVoter { + async fn vote( + &self, + _settings: ::ElectoralSettings, + // We could use 0 as a special case (to avoid requiring an Option) + properties: ::ElectionProperties, + ) -> std::result::Result< + <::Vote as VoteStorage>::Vote, + anyhow::Error, + > { + tracing::info!("Block height tracking called properties: {:?}", properties); + let BlockHeightTrackingProperties { witness_from_index } = properties; + + let mut headers = VecDeque::new(); + + let header_from_btc_header = + |header: BlockHeader| -> anyhow::Result> { + Ok(Header { + block_height: header.height, + hash: header.hash.to_byte_array().into(), + parent_hash: header + .previous_block_hash + .ok_or_else(|| anyhow::anyhow!("No parent hash"))? 
+ .to_byte_array() + .into(), + }) + }; + + let get_header = |index: BlockNumber| { + async move { + let header = self.client.block_header(index).await?; + // tracing::info!("bht: Voting for block height tracking: {:?}", header.height); + // Order from lowest to highest block index. + Ok::, anyhow::Error>(header_from_btc_header(header)?) + } + }; + + let best_block_header = header_from_btc_header(self.client.best_block_header().await?)?; + + if best_block_header.block_height <= witness_from_index { + Err(anyhow::anyhow!("btc: no new blocks found since best block height is {} for witness_from={witness_from_index}", best_block_header.block_height)) + } else if witness_from_index == 0 { + headers.push_back(best_block_header); + Ok(InputHeaders(headers)) + } else { + // fetch the headers we haven't got yet + for index in witness_from_index..best_block_header.block_height { + // let header = self.client.block_header(index).await?; + // tracing::info!("bht: Voting for block height tracking: {:?}", header.height); + // Order from lowest to highest block index. + headers.push_back(get_header(index).await?); + } + + headers.push_back(best_block_header); + tracing::info!( + "bht: Voting for block height tracking: {:?}", + headers.iter().map(|h| h.block_height) + ); + + // We should have a chain of hashes. + if headers.iter().zip(headers.iter().skip(1)).all(|(a, b)| a.hash == b.parent_hash) { + tracing::info!( + "bht: Submitting vote for (witness_from={witness_from_index}) with {} headers", + headers.len() + ); + Ok(InputHeaders(headers)) + } else { + Err(anyhow::anyhow!("bht: Headers do not form a chain")) + } + } + } +} + +pub async fn start( + scope: &Scope<'_, anyhow::Error>, + client: BtcRetryRpcClient, + state_chain_client: Arc, +) -> Result<()> +where + StateChainClient: StorageApi + + ChainApi + + SignedExtrinsicApi + + ElectoralApi + + 'static + + Send + + Sync, +{ + tracing::info!("Starting BTC witness"); + scope.spawn(async move { + task_scope::task_scope(|scope| { + async { + crate::elections::Voter::new( + scope, + state_chain_client, + CompositeVoter::::new(( + BitcoinBlockHeightTrackingVoter { client: client.clone() }, + BitcoinDepositChannelWitnessingVoter { client }, + )), + ) + .continuously_vote() + .await; + + Ok(()) + } + .boxed() + }) + .await + }); + + Ok(()) +} diff --git a/engine/src/witness/sol.rs b/engine/src/witness/sol.rs index 1745336bbf..03538979d4 100644 --- a/engine/src/witness/sol.rs +++ b/engine/src/witness/sol.rs @@ -238,7 +238,7 @@ where StateChainClient: StorageApi + ChainApi + SignedExtrinsicApi - + ElectoralApi + + ElectoralApi + 'static + Send + Sync, diff --git a/engine/src/witness/start.rs b/engine/src/witness/start.rs index ab0ac1b422..841dbd3b4f 100644 --- a/engine/src/witness/start.rs +++ b/engine/src/witness/start.rs @@ -16,7 +16,7 @@ use crate::{ stream_api::{StreamApi, FINALIZED, UNFINALIZED}, }, }; -use state_chain_runtime::SolanaInstance; +use state_chain_runtime::{BitcoinInstance, SolanaInstance}; use super::common::epoch_source::EpochSource; @@ -43,7 +43,8 @@ where StateChainClient: StorageApi + ChainApi + SignedExtrinsicApi - + ElectoralApi + + ElectoralApi + + ElectoralApi + 'static + Send + Sync, @@ -99,17 +100,17 @@ where db.clone(), ); - let start_btc = super::btc::start( - scope, - btc_client, - witness_call.clone(), - prewitness_call, - state_chain_client.clone(), - state_chain_stream.clone(), - unfinalised_state_chain_stream.clone(), - epoch_source.clone(), - db.clone(), - ); + // let start_btc = super::btc::start( + // scope, + //
btc_client.clone(), + // witness_call.clone(), + // prewitness_call, + // state_chain_client.clone(), + // state_chain_stream.clone(), + // unfinalised_state_chain_stream.clone(), + // epoch_source.clone(), + // db.clone(), + // ); let start_dot = super::dot::start( scope, @@ -131,9 +132,11 @@ where db, ); - let start_sol = super::sol::start(scope, sol_client, state_chain_client); + let start_sol = super::sol::start(scope, sol_client, state_chain_client.clone()); + + let start_btc_e = super::btc_e::start(scope, btc_client, state_chain_client); - futures::future::try_join5(start_eth, start_btc, start_dot, start_arb, start_sol).await?; + futures_util::try_join!(start_eth, start_dot, start_arb, start_sol, start_btc_e)?; Ok(()) } diff --git a/state-chain/cf-integration-tests/src/mock_runtime.rs b/state-chain/cf-integration-tests/src/mock_runtime.rs index 82a3e1d365..6fe08fb734 100644 --- a/state-chain/cf-integration-tests/src/mock_runtime.rs +++ b/state-chain/cf-integration-tests/src/mock_runtime.rs @@ -26,8 +26,8 @@ use state_chain_runtime::{ opaque::SessionKeys, test_runner::*, AccountId, AccountRolesConfig, ArbitrumChainTrackingConfig, BitcoinChainTrackingConfig, - EmissionsConfig, EnvironmentConfig, EthereumChainTrackingConfig, EthereumVaultConfig, - EvmThresholdSignerConfig, FlipConfig, FundingConfig, GovernanceConfig, + BitcoinElectionsConfig, EmissionsConfig, EnvironmentConfig, EthereumChainTrackingConfig, + EthereumVaultConfig, EvmThresholdSignerConfig, FlipConfig, FundingConfig, GovernanceConfig, PolkadotChainTrackingConfig, ReputationConfig, SessionConfig, SolanaChainTrackingConfig, SolanaElectionsConfig, ValidatorConfig, }; @@ -340,6 +340,7 @@ impl ExtBuilder { ), }), }, + bitcoin_elections: BitcoinElectionsConfig { option_initial_state: None }, ethereum_broadcaster: state_chain_runtime::EthereumBroadcasterConfig { broadcast_timeout: 5 * BLOCKS_PER_MINUTE_ETHEREUM, }, diff --git a/state-chain/chains/Cargo.toml b/state-chain/chains/Cargo.toml index c0cfdc9fc5..3cbd416b6e 100644 --- a/state-chain/chains/Cargo.toml +++ b/state-chain/chains/Cargo.toml @@ -56,6 +56,7 @@ hex = { workspace = true, features = ["serde"] } hex-literal = { workspace = true } serde = { workspace = true, features = ["derive", "alloc"] } serde_bytes = { workspace = true } +serde-big-array = { workspace = true } log = { workspace = true } itertools = { workspace = true } arrayref = { workspace = true } diff --git a/state-chain/chains/src/btc.rs b/state-chain/chains/src/btc.rs index d4d9d579c4..7110e0f685 100644 --- a/state-chain/chains/src/btc.rs +++ b/state-chain/chains/src/btc.rs @@ -109,8 +109,8 @@ impl FeeRefundCalculator for BitcoinTransactionData { Decode, MaxEncodedLen, TypeInfo, - Serialize, - Deserialize, + serde::Serialize, + serde::Deserialize, )] #[codec(mel_bound())] pub struct BitcoinTrackedData { @@ -365,7 +365,19 @@ fn verify_single_threshold_signature( // TODO: Look at moving this into Utxo. They're exactly the same apart from the ChannelId // which could be made generic, if even necessary at all. -#[derive(Encode, Decode, TypeInfo, Clone, RuntimeDebug, PartialEq, Eq, MaxEncodedLen, Default)] +#[derive( + Encode, + Decode, + TypeInfo, + Clone, + RuntimeDebug, + PartialEq, + Eq, + MaxEncodedLen, + Default, + serde::Serialize, + serde::Deserialize, +)] pub struct UtxoId { // TxId of the transaction in which this utxo was created. 
pub tx_id: Hash, @@ -387,7 +399,7 @@ pub enum Error { /// The address is invalid InvalidAddress, } -#[derive(Encode, Decode, TypeInfo, Clone, RuntimeDebug, PartialEq, Eq)] +#[derive(Encode, Decode, TypeInfo, Clone, RuntimeDebug, PartialEq, Eq, Serialize, Deserialize)] pub struct Utxo { pub id: UtxoId, pub amount: BtcAmount, @@ -988,7 +1000,17 @@ pub enum BitcoinOp { PushVersion { version: u8 }, } -#[derive(Encode, Decode, TypeInfo, Clone, RuntimeDebug, PartialEq, Eq)] +#[derive( + Encode, + Decode, + TypeInfo, + Clone, + RuntimeDebug, + PartialEq, + Eq, + serde::Serialize, + serde::Deserialize, +)] pub struct BitcoinScript { bytes: Vec, } diff --git a/state-chain/chains/src/btc/deposit_address.rs b/state-chain/chains/src/btc/deposit_address.rs index c90a3286ef..84a83d52e0 100644 --- a/state-chain/chains/src/btc/deposit_address.rs +++ b/state-chain/chains/src/btc/deposit_address.rs @@ -1,10 +1,24 @@ use crate::ChannelLifecycleHooks; +use serde_big_array::BigArray; + use super::*; -#[derive(Encode, Decode, MaxEncodedLen, TypeInfo, Clone, RuntimeDebug, PartialEq, Eq)] +#[derive( + Encode, + Decode, + MaxEncodedLen, + TypeInfo, + Clone, + RuntimeDebug, + PartialEq, + Eq, + serde::Serialize, + serde::Deserialize, +)] pub struct TapscriptPath { pub salt: u32, + #[serde(with = "BigArray")] pub tweaked_pubkey_bytes: [u8; 33], pub tapleaf_hash: [u8; 32], pub unlock_script: BitcoinScript, @@ -21,7 +35,18 @@ impl TapscriptPath { } } -#[derive(Encode, Decode, MaxEncodedLen, TypeInfo, Clone, RuntimeDebug, PartialEq, Eq)] +#[derive( + Encode, + Decode, + MaxEncodedLen, + TypeInfo, + Clone, + RuntimeDebug, + PartialEq, + Eq, + serde::Serialize, + serde::Deserialize, +)] pub struct DepositAddress { pub pubkey_x: [u8; 32], pub script_path: Option, diff --git a/state-chain/chains/src/lib.rs b/state-chain/chains/src/lib.rs index ed5a855183..ec259a4f53 100644 --- a/state-chain/chains/src/lib.rs +++ b/state-chain/chains/src/lib.rs @@ -69,9 +69,66 @@ pub mod instances; pub mod mocks; pub mod witness_period { - use core::ops::{Rem, Sub}; + use core::{ + iter::Step, + ops::{Rem, Sub}, + }; + + use sp_runtime::traits::Block; + use sp_std::ops::RangeInclusive; + + use codec::{Decode, Encode}; + use frame_support::{ + ensure, + sp_runtime::traits::{One, Saturating}, + }; + use scale_info::TypeInfo; + use serde::{Deserialize, Serialize}; + + // So we can store a range-like object in storage, since this has encode and decode. 
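+ // `root` is the first block of the range and `period` its length: `into_range_inclusive` below yields `root..=root + period - 1`.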
+ #[derive( + Debug, + Clone, + PartialEq, + Eq, + Encode, + Decode, + TypeInfo, + Deserialize, + Serialize, + Default, + PartialOrd, + Ord, + )] + pub struct BlockWitnessRange { + root: I, + period: I, + } + + impl< + I: Copy + Saturating + Sub + Rem + Eq + One + PartialOrd, + > BlockWitnessRange + { + pub fn try_new(root: I, period: I) -> Result { + ensure!(period >= I::one(), ()); + ensure!(is_block_witness_root(period, root), ()); + Ok(Self { root, period }) + } + } + + impl BlockWitnessRange { + pub fn into_range_inclusive(self) -> RangeInclusive { + self.root..=self.root.saturating_add(self.period.saturating_sub(I::one())) + } + + pub fn root(&self) -> &I { + &self.root + } - use frame_support::sp_runtime::traits::{One, Saturating}; + pub fn period(&self) -> &I { + &self.period + } + } fn block_witness_floor< I: Copy + Saturating + Sub + Rem + Eq + One, @@ -109,6 +166,63 @@ pub mod witness_period { let floored_block_number = block_witness_floor(witness_period, block_number); floored_block_number..=floored_block_number.saturating_add(witness_period - One::one()) } + + impl Step for BlockWitnessRange { + fn steps_between(start: &Self, end: &Self) -> (usize, Option) { + // assert_eq!(start.period, end.period); + todo!() + } + + fn forward_checked(mut start: Self, count: usize) -> Option { + (0..count) + .for_each(|_| start.root = start.root.clone().saturating_add(start.period.clone())); + Some(start) + } + + fn backward_checked(mut start: Self, count: usize) -> Option { + (0..count) + .for_each(|_| start.root = start.root.clone().saturating_sub(start.period.clone())); + Some(start) + } + } + + pub trait BlockZero { + fn zero() -> Self; + fn is_zero(&self) -> bool; + } + + impl> BlockZero for BlockWitnessRange { + fn zero() -> Self { + Self { + root: I::zero(), + period: 1.into(), // NOTE: of course this is horribly wrong + } + } + + fn is_zero(&self) -> bool { + self.root.is_zero() + } + } + + impl BlockZero for u64 { + fn zero() -> Self { + 0 + } + + fn is_zero(&self) -> bool { + *self == 0 + } + } + + impl BlockZero for u32 { + fn zero() -> Self { + 0 + } + + fn is_zero(&self) -> bool { + *self == 0 + } + } } /// A trait representing all the types and constants that need to be implemented for supported diff --git a/state-chain/custom-rpc/src/lib.rs b/state-chain/custom-rpc/src/lib.rs index 5e96ffeddb..a697ab4577 100644 --- a/state-chain/custom-rpc/src/lib.rs +++ b/state-chain/custom-rpc/src/lib.rs @@ -993,6 +993,21 @@ pub trait CustomApi { at: Option, ) -> RpcResult>; + #[method(name = "bitcoin_electoral_data")] + fn cf_bitcoin_electoral_data( + &self, + validator: state_chain_runtime::AccountId, + at: Option, + ) -> RpcResult>; + + #[method(name = "bitcoin_filter_votes")] + fn cf_bitcoin_filter_votes( + &self, + validator: state_chain_runtime::AccountId, + proposed_votes: Vec, + at: Option, + ) -> RpcResult>; + #[method(name = "validate_dca_params")] fn cf_validate_dca_params( &self, @@ -1257,7 +1272,7 @@ where + BlockchainEvents + CallApiAt + StorageProvider, - C::Api: CustomRuntimeApi + ElectoralRuntimeApi, + C::Api: CustomRuntimeApi + ElectoralRuntimeApi, { pass_through! 
{ cf_is_auction_phase() -> bool, @@ -1856,7 +1871,7 @@ where validator: state_chain_runtime::AccountId, at: Option, ) -> RpcResult> { - self.with_runtime_api(at, |api, hash| api.cf_electoral_data(hash, validator)) + self.with_runtime_api(at, |api, hash| api.cf_solana_electoral_data(hash, validator)) } fn cf_solana_filter_votes( @@ -1865,7 +1880,28 @@ where proposed_votes: Vec, at: Option, ) -> RpcResult> { - self.with_runtime_api(at, |api, hash| api.cf_filter_votes(hash, validator, proposed_votes)) + self.with_runtime_api(at, |api, hash| { + api.cf_solana_filter_votes(hash, validator, proposed_votes) + }) + } + + fn cf_bitcoin_electoral_data( + &self, + validator: state_chain_runtime::AccountId, + at: Option, + ) -> RpcResult> { + self.with_runtime_api(at, |api, hash| api.cf_bitcoin_electoral_data(hash, validator)) + } + + fn cf_bitcoin_filter_votes( + &self, + validator: state_chain_runtime::AccountId, + proposed_votes: Vec, + at: Option, + ) -> RpcResult> { + self.with_runtime_api(at, |api, hash| { + api.cf_bitcoin_filter_votes(hash, validator, proposed_votes) + }) } fn cf_get_vault_swap_details( diff --git a/state-chain/node/src/chain_spec.rs b/state-chain/node/src/chain_spec.rs index a9801dc356..43d6de3a65 100644 --- a/state-chain/node/src/chain_spec.rs +++ b/state-chain/node/src/chain_spec.rs @@ -21,14 +21,14 @@ use sp_core::{ Pair, Public, }; use state_chain_runtime::{ - chainflip::{solana_elections, Offence}, + chainflip::{bitcoin_elections, solana_elections, Offence}, constants::common::{ BLOCKS_PER_MINUTE_ARBITRUM, BLOCKS_PER_MINUTE_ETHEREUM, BLOCKS_PER_MINUTE_POLKADOT, BLOCKS_PER_MINUTE_SOLANA, }, opaque::SessionKeys, - AccountId, BlockNumber, FlipBalance, SetSizeParameters, Signature, SolanaElectionsConfig, - WASM_BINARY, + AccountId, BitcoinElectionsConfig, BlockNumber, FlipBalance, SetSizeParameters, Signature, + SolanaElectionsConfig, WASM_BINARY, }; use cf_utilities::clean_hex_address; @@ -377,6 +377,9 @@ pub fn inner_cf_development_config( sol_swap_endpoint_program_data_account, )), }, + BitcoinElectionsConfig { + option_initial_state: Some(bitcoin_elections::initial_state()), + }, )) .build()) } @@ -539,6 +542,9 @@ macro_rules! network_spec { sol_swap_endpoint_program_data_account, )), }, + BitcoinElectionsConfig { + option_initial_state: Some(bitcoin_elections::initial_state()), + }, )) .build()) } @@ -591,6 +597,7 @@ fn testnet_genesis( solana_safety_margin: u64, auction_bid_cutoff_percentage: Percent, solana_elections: state_chain_runtime::SolanaElectionsConfig, + bitcoin_elections: state_chain_runtime::BitcoinElectionsConfig, ) -> serde_json::Value { // Sanity Checks for (account_id, aura_id, grandpa_id) in initial_authorities.iter() { @@ -849,6 +856,9 @@ fn testnet_genesis( ..Default::default() }, solana_elections, + + // TODO: Set correct initial state + bitcoin_elections, // We can't use ..Default::default() here because chain tracking panics on default (by // design). And the way ..Default::default() syntax works is that it generates the default // value for the whole struct, not just the fields that are missing. 
diff --git a/state-chain/pallets/cf-elections/Cargo.toml b/state-chain/pallets/cf-elections/Cargo.toml index 7e0645591b..4c35d7c184 100644 --- a/state-chain/pallets/cf-elections/Cargo.toml +++ b/state-chain/pallets/cf-elections/Cargo.toml @@ -45,6 +45,8 @@ sp-std = { workspace = true } [dev-dependencies] cf-test-utilities = { workspace = true, default-features = true } rand = { workspace = true, features = ["std"] } +proptest = { version = "1.6"} +proptest-derive = { version = "0.5.1" } [features] default = ["std"] diff --git a/state-chain/pallets/cf-elections/src/electoral_systems.rs b/state-chain/pallets/cf-elections/src/electoral_systems.rs index c7a51f6fc8..ced6e288cc 100644 --- a/state-chain/pallets/cf-elections/src/electoral_systems.rs +++ b/state-chain/pallets/cf-elections/src/electoral_systems.rs @@ -1,3 +1,5 @@ +pub mod block_height_tracking; +pub mod block_witnesser; pub mod blockchain; pub mod composite; pub mod egress_success; diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking.rs b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking.rs new file mode 100644 index 0000000000..50d74ee68c --- /dev/null +++ b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking.rs @@ -0,0 +1,458 @@ +use core::{ + iter::Step, + ops::{Add, AddAssign, Range, RangeInclusive, Rem, Sub, SubAssign}, +}; + +use crate::{ + electoral_system::{ + AuthorityVoteOf, ConsensusVotes, ElectionReadAccess, ElectionWriteAccess, ElectoralSystem, + ElectoralWriteAccess, VotePropertiesOf, + }, + vote_storage::{self, VoteStorage}, + CorruptStorageError, ElectionIdentifier, +}; +use cf_chains::{ + assets::arb::Chain, + btc::BlockNumber, + witness_period::{BlockWitnessRange, BlockZero}, +}; +use cf_utilities::success_threshold_from_share_count; +use codec::{Decode, Encode}; +use consensus::{ConsensusMechanism, StagedConsensus, SupermajorityConsensus, Threshold}; +use frame_support::{ + ensure, + pallet_prelude::{MaxEncodedLen, MaybeSerializeDeserialize, Member}, + sp_runtime::traits::{AtLeast32BitUnsigned, One, Saturating}, + Parameter, +}; +use itertools::Itertools; +use primitives::{ + trim_to_length, validate_vote_and_height, ChainBlocks, Header, MergeFailure, + VoteValidationError, +}; +use scale_info::TypeInfo; +use serde::{Deserialize, Serialize}; +use sp_std::{ + collections::{btree_map::BTreeMap, vec_deque::VecDeque}, + vec::Vec, +}; +use state_machine::{Indexed, StateMachine, Validate}; + +#[cfg(test)] +use proptest_derive::Arbitrary; + +pub mod consensus; +pub mod primitives; +pub mod state_machine; +pub mod state_machine_es; + +#[cfg_attr(test, derive(Arbitrary))] +#[derive( + Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize, Ord, PartialOrd, +)] +pub struct BlockHeightTrackingProperties { + /// An election starts with a given block number, + /// meaning that engines have to submit all blocks they know of starting with this height. 
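+ /// A `witness_from_index` of zero is the start-up case, where consensus is first sought on a block height to begin witnessing from (see `check_consensus`).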
+ pub witness_from_index: BlockNumber, +} + +#[derive( + Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize, Ord, PartialOrd, +)] +pub struct RangeOfBlockWitnessRanges { + pub witness_from_root: ChainBlockNumber, + pub witness_to_root: ChainBlockNumber, + pub witness_period: ChainBlockNumber, +} + +impl< + ChainBlockNumber: Saturating + + One + + Copy + + PartialOrd + + Step + + Into + + Sub + + Rem + + Saturating + + Eq, + > RangeOfBlockWitnessRanges +{ + pub fn try_new( + witness_from_root: ChainBlockNumber, + witness_to_root: ChainBlockNumber, + witness_period: ChainBlockNumber, + ) -> Result { + ensure!(witness_from_root <= witness_to_root, CorruptStorageError::new()); + + Ok(Self { witness_from_root, witness_to_root, witness_period }) + } + + pub fn block_witness_ranges(&self) -> Result>, ()> { + (self.witness_from_root..=self.witness_to_root) + .step_by(Into::::into(self.witness_period) as usize) + .map(|root| BlockWitnessRange::try_new(root, self.witness_period)) + .collect::, _>>() + } + + pub fn witness_to_root(&self) -> ChainBlockNumber { + self.witness_to_root + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize)] +pub enum OldChainProgress { + // Block witnesser will discard any elections that were started for this range and start them + // again since we've detected a reorg + Reorg(RangeOfBlockWitnessRanges), + // the chain is just progressing as a normal chain of hashes + Continuous(RangeOfBlockWitnessRanges), + // there was no update to the witnessed block headers + None(ChainBlockNumber), + // We are starting up and don't have consensus on a block number yet + WaitingForFirstConsensus, +} + +#[derive(Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize)] +pub enum ChainProgress { + // Block witnesser will discard any elections that were started for this range and start them + // again since we've detected a reorg + Reorg(RangeInclusive), + // the chain is just progressing as a normal chain of hashes + Continuous(RangeInclusive), + // there was no update to the witnessed block headers + None(ChainBlockNumber), + // We are starting up and don't have consensus on a block number yet + WaitingForFirstConsensus, +} + +//-------- implementation of block height tracking as a state machine -------------- + +trait BlockHeightTrait = PartialEq + Ord + Copy + Step + BlockZero; + +pub struct BlockHeightTrackingConsensus { + votes: Vec>, +} + +impl Default + for BlockHeightTrackingConsensus +{ + fn default() -> Self { + Self { votes: Default::default() } + } +} + +impl< + ChainBlockNumber: BlockHeightTrait + sp_std::fmt::Debug, + ChainBlockHash: Clone + PartialEq + Ord + sp_std::fmt::Debug, + > ConsensusMechanism for BlockHeightTrackingConsensus +{ + type Vote = InputHeaders; + type Result = InputHeaders; + type Settings = (Threshold, BlockHeightTrackingProperties); + + fn insert_vote(&mut self, vote: Self::Vote) { + self.votes.push(vote); + } + + fn check_consensus(&self, settings: &Self::Settings) -> Option { + // let num_authorities = consensus_votes.num_authorities(); + + let (threshold, properties) = settings; + + if properties.witness_from_index.is_zero() { + // This is the case for finding an appropriate block number to start witnessing from + + let mut consensus: SupermajorityConsensus<_> = SupermajorityConsensus::default(); + + for vote in &self.votes { + // we currently only count votes consisting of a single block height + // there has to be a supermajority voting for the exact same 
header + if vote.0.len() == 1 { + consensus.insert_vote(vote.0[0].clone()) + } + } + + consensus + .check_consensus(&threshold) + .map(|result| { + let mut headers = VecDeque::new(); + headers.push_back(result); + InputHeaders(headers) + }) + .map(|result| { + log::info!("block_height: initial consensus: {result:?}"); + result + }) + } else { + // This is the actual consensus finding, once the engine is running + + let mut consensus: StagedConsensus, usize> = + StagedConsensus::new(); + + for mut vote in self.votes.clone() { + // ensure that the vote is valid + if let Err(err) = validate_vote_and_height(properties.witness_from_index, &vote.0) { + log::warn!("received invalid vote: {err:?} "); + continue; + } + + // we count a given vote as multiple votes for all nonempty subchains + while vote.0.len() > 0 { + consensus.insert_vote((vote.0.len(), vote.clone())); + vote.0.pop_back(); + } + } + + consensus.check_consensus(&threshold).map(|result| { + log::info!( + "(witness_from: {:?}): successful consensus for ranges: {:?}..={:?}", + properties, + result.0.front(), + result.0.back() + ); + result + }) + } + } +} + +//------------------------ input headers --------------------------- +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Encode, Decode, TypeInfo, MaxEncodedLen)] +pub struct InputHeaders(pub VecDeque>); + +#[cfg(test)] +mod tests { + + use core::iter::Step; + + use crate::electoral_systems::block_height_tracking::state_machine::{Indexed, Validate}; + use proptest::{ + prelude::{any, prop, Arbitrary, Just, Strategy}, + prop_oneof, proptest, + }; + + use super::{ + primitives::Header, state_machine::StateMachine, BHWState, BlockHeightTrackingDSM, + InputHeaders, + }; + + pub fn arb_input_headers( + witness_from: N, + ) -> impl Strategy> { + // TODO: handle the case where `witness_from` = 0. 
+ + prop::collection::vec(any::(), 2..10).prop_map(move |data| { + let headers = + data.iter().zip(data.iter().skip(1)).enumerate().map(|(ix, (h0, h1))| Header { + block_height: N::forward(witness_from.clone(), ix), + hash: h1.clone(), + parent_hash: h0.clone(), + }); + InputHeaders::(headers.collect()) + }) + } + + pub fn arb_state( + ) -> impl Strategy> { + prop_oneof![ + Just(BHWState::Starting), + (any::(), any::()).prop_flat_map(move |(n, is_reorg_without_known_root)| { + arb_input_headers(n).prop_map(move |headers| { + let witness_from = if is_reorg_without_known_root { + headers.0.front().unwrap().block_height.clone() + } else { + N::forward(headers.0.back().unwrap().block_height.clone(), 1) + }; + BHWState::Running { headers: headers.0, witness_from } + }) + }), + ] + } + + #[test] + pub fn test_dsm() { + BlockHeightTrackingDSM::<6, u32, bool>::test(arb_state(), |index| { + arb_input_headers(index.witness_from_index).boxed() + }); + } +} + +impl Indexed for InputHeaders { + type Index = BlockHeightTrackingProperties; + + fn has_index(&self, base: &Self::Index) -> bool { + if base.witness_from_index.is_zero() { + true + } else { + match self.0.front() { + Some(first) => first.block_height == base.witness_from_index, + None => false, + } + } + } +} + +impl Validate for InputHeaders { + type Error = VoteValidationError; + fn is_valid(&self) -> Result<(), Self::Error> { + if self.0.len() == 0 { + Err(VoteValidationError::EmptyVote) + } else { + ChainBlocks { headers: self.0.clone() }.is_valid() + } + } +} + +//------------------------ state --------------------------- + +#[derive( + Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize, Ord, PartialOrd, +)] +pub enum BHWState { + Starting, + Running { headers: VecDeque>, witness_from: N }, +} + +impl Default for BHWState { + fn default() -> Self { + Self::Starting + } +} + +impl Validate for BHWState { + type Error = &'static str; + + fn is_valid(&self) -> Result<(), Self::Error> { + match self { + BHWState::Starting => Ok(()), + + BHWState::Running { headers, witness_from: _ } => + if headers.len() > 0 { + InputHeaders(headers.clone()) + .is_valid() + .map_err(|_| "blocks should be continuous") + } else { + Err("Block height tracking state should always be non-empty after start-up.") + }, + } + } +} + +//------------------------ output --------------------------- +impl Validate for Result { + type Error = B; + + fn is_valid(&self) -> Result<(), Self::Error> { + match self { + Ok(_) => Ok(()), + Err(err) => Err(err.clone()), + } + } +} + +//------------------------ state machine --------------------------- +pub struct BlockHeightTrackingDSM { + _phantom: core::marker::PhantomData<(ChainBlockNumber, ChainBlockHash)>, +} + +impl< + const SAFETY_MARGIN: usize, + H: PartialEq + Eq + Clone + sp_std::fmt::Debug + 'static, + N: BlockHeightTrait + sp_std::fmt::Debug + 'static, + > StateMachine for BlockHeightTrackingDSM +{ + type State = BHWState; + type DisplayState = ChainProgress; + type Input = InputHeaders; + type Output = Result, &'static str>; + + fn input_index(s: &Self::State) -> ::Index { + let witness_from_index = match s { + BHWState::Starting => N::zero(), + BHWState::Running { headers: _, witness_from } => witness_from.clone(), + }; + BlockHeightTrackingProperties { witness_from_index } + } + + // specification for step function + #[cfg(test)] + fn step_specification(before: &Self::State, input: &Self::Input, after: &Self::State) -> bool { + match after { + // the starting case should only ever be possible 
as the `before` state. + BHWState::Starting => false, + + // otherwise we know that the after state will be running + BHWState::Running { headers, witness_from } => match before { + BHWState::Starting => true, + BHWState::Running { + headers: before_headers, + witness_from: before_witness_from, + } => + (*witness_from == before_headers.front().unwrap().block_height) || + (*witness_from == N::forward(headers.back().unwrap().block_height, 1)), + }, + } + } + + fn step(s: &mut Self::State, new_headers: Self::Input) -> Self::Output { + match s { + BHWState::Starting => { + let first = new_headers.0.front().unwrap().block_height; + let last = new_headers.0.back().unwrap().block_height; + *s = BHWState::Running { + headers: new_headers.0.clone(), + witness_from: N::forward(last, 1), + }; + Ok(ChainProgress::Continuous(first..=last)) + }, + + BHWState::Running { headers, witness_from } => { + let mut chainblocks = ChainBlocks { headers: headers.clone() }; + + match chainblocks.merge(new_headers.0) { + Ok(merge_info) => { + log::info!( + "added new blocks: {:?}, replacing these blocks: {:?}", + merge_info.added, + merge_info.removed + ); + + let safe_headers = trim_to_length(&mut chainblocks.headers, SAFETY_MARGIN); + + let current_state = chainblocks.current_state_as_no_chain_progress(); + + *headers = chainblocks.headers; + *witness_from = N::forward(headers.back().unwrap().block_height, 1); + + // if we merge after a reorg, and the blocks we got are the same + // as the ones we previously had, then `into_chain_progress` might + // return `None`. In that case we return our current state. + Ok(merge_info.into_chain_progress().unwrap_or(current_state)) + }, + Err(MergeFailure::ReorgWithUnknownRoot { + new_block, + existing_wrong_parent, + }) => { + log::info!("detected a reorg: got block {new_block:?} whose parent hash does not match the parent block we have recorded: {existing_wrong_parent:?}"); + *witness_from = headers.front().unwrap().block_height; + Ok(chainblocks.current_state_as_no_chain_progress()) + }, + + Err(MergeFailure::InternalError(reason)) => { + log::error!("internal error in block height tracker: {reason}"); + Err("internal error in block height tracker") + }, + } + }, + } + } + + fn get(s: &Self::State) -> Self::DisplayState { + match s { + BHWState::Starting => ChainProgress::WaitingForFirstConsensus, + BHWState::Running { headers, witness_from: _ } => + ChainProgress::None(headers.back().unwrap().block_height), + } + } +} diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/consensus.rs b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/consensus.rs new file mode 100644 index 0000000000..899fc12b3a --- /dev/null +++ b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/consensus.rs @@ -0,0 +1,144 @@ +use core::ops::{Add, AddAssign, Range, RangeInclusive, Sub, SubAssign}; + +use crate::{ + electoral_system::{ + AuthorityVoteOf, ConsensusVotes, ElectionReadAccess, ElectionWriteAccess, ElectoralSystem, + ElectoralWriteAccess, VotePropertiesOf, + }, + vote_storage::{self, VoteStorage}, + CorruptStorageError, ElectionIdentifier, +}; +use cf_chains::btc::BlockNumber; +use cf_utilities::success_threshold_from_share_count; +use codec::{Decode, Encode}; +use frame_support::{ + ensure, + pallet_prelude::{MaybeSerializeDeserialize, Member}, + sp_runtime::traits::AtLeast32BitUnsigned, + Parameter, +}; +use itertools::Itertools; +use scale_info::TypeInfo; +use serde::{Deserialize, Serialize}; +use 
sp_std::{ + collections::{btree_map::BTreeMap, vec_deque::VecDeque}, + vec::Vec, +}; + +/// Abstract consensus mechanism. +/// +/// This trait is an abstraction over simple consensus mechanisms, +/// where there is the concept of incrementally adding votes, +/// and checking if the votes result in consensus. +pub trait ConsensusMechanism: Default { + /// type of votes. + type Vote; + + /// result type of the consensus. + type Result; + + /// additional information required to check consensus + type Settings; + + fn insert_vote(&mut self, vote: Self::Vote); + fn check_consensus(&self, settings: &Self::Settings) -> Option; +} + +//----------------------------------------------- +// majority consensus + +/// Simple implementation of a (super-)majority consensus +pub struct SupermajorityConsensus { + votes: BTreeMap, +} + +pub struct Threshold { + pub threshold: u32, +} + +impl Default for SupermajorityConsensus { + fn default() -> Self { + Self { votes: Default::default() } + } +} + +impl ConsensusMechanism for SupermajorityConsensus { + type Vote = Vote; + type Result = Vote; + type Settings = Threshold; + + fn insert_vote(&mut self, vote: Self::Vote) { + if let Some(count) = self.votes.get_mut(&vote) { + *count += 1; + } else { + self.votes.insert(vote, 1); + } + } + + fn check_consensus(&self, settings: &Self::Settings) -> Option { + let best = self.votes.iter().last(); + + if let Some((best_vote, best_count)) = best { + if best_count >= &settings.threshold { + return Some(best_vote.clone()); + } + } + + return None; + } +} + +//----------------------------------------------- +// staged consensus + +/// Staged consensus. +/// +/// Votes are indexed by stages, and each stage is evaluated +/// separately. Evaluation happens in reverse order of the stage index, +/// i.e. the highest stage which achieves consensus determines the result. +/// If no stage achieves consensus, the result is inconclusive. 
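// A minimal, self-contained sketch of the two mechanisms described above: a plain
// supermajority tally, and a staged tally where the highest stage that reaches the
// threshold wins. The names and signatures below are illustrative stand-ins only and
// are not this crate's `ConsensusMechanism` implementations.
use std::collections::BTreeMap;

/// Returns the vote backed by at least `threshold` voters, if any.
fn supermajority<V: Ord + Clone>(votes: &[V], threshold: u32) -> Option<V> {
    let mut counts: BTreeMap<&V, u32> = BTreeMap::new();
    for v in votes {
        *counts.entry(v).or_insert(0) += 1;
    }
    counts
        .into_iter()
        .filter(|(_, count)| *count >= threshold)
        .max_by_key(|(_, count)| *count)
        .map(|(v, _)| v.clone())
}

/// Votes are indexed by a stage; stages are checked from highest to lowest and the
/// first stage that reaches the threshold determines the result.
fn staged<V: Ord + Clone>(votes: &[(usize, V)], threshold: u32) -> Option<V> {
    let mut stages: BTreeMap<usize, Vec<V>> = BTreeMap::new();
    for (stage, v) in votes {
        stages.entry(*stage).or_default().push(v.clone());
    }
    stages.iter().rev().find_map(|(_, stage_votes)| supermajority(stage_votes, threshold))
}

fn main() {
    // 5 of 7 authorities agree on "0xabc", which meets a threshold of 5.
    let votes = ["0xabc", "0xabc", "0xabc", "0xdef", "0xabc", "0xabc", "0x123"];
    assert_eq!(supermajority(&votes, 5), Some("0xabc"));

    // Stage 2 (e.g. votes covering a longer sub-chain) only has 2 supporters, so the
    // result falls back to stage 1, which has 5.
    let staged_votes = [
        (2, "long"), (2, "long"),
        (1, "short"), (1, "short"), (1, "short"), (1, "short"), (1, "short"),
    ];
    assert_eq!(staged(&staged_votes, 5), Some("short"));
}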
+pub struct StagedConsensus { + stages: BTreeMap, +} + +impl StagedConsensus { + pub fn new() -> Self { + Self { stages: BTreeMap::new() } + } +} + +impl Default for StagedConsensus { + fn default() -> Self { + Self { stages: Default::default() } + } +} + +impl ConsensusMechanism + for StagedConsensus +{ + type Result = Stage::Result; + type Vote = (Index, Stage::Vote); + type Settings = Stage::Settings; + + fn insert_vote(&mut self, (index, vote): Self::Vote) { + if let Some(stage) = self.stages.get_mut(&index) { + stage.insert_vote(vote) + } else { + let mut stage = Stage::default(); + stage.insert_vote(vote); + self.stages.insert(index, stage); + } + } + + fn check_consensus(&self, settings: &Self::Settings) -> Option { + // we check all stages starting with the highest index, + // the first one that has consensus wins + for (_, stage) in self.stages.iter().rev() { + if let Some(result) = stage.check_consensus(settings) { + return Some(result); + } + } + + None + } +} diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/primitives.rs b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/primitives.rs new file mode 100644 index 0000000000..bd5d3a2813 --- /dev/null +++ b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/primitives.rs @@ -0,0 +1,259 @@ +use core::{ + iter::Step, + ops::{Add, AddAssign, Range, RangeInclusive, Rem, Sub, SubAssign}, +}; + +use crate::{ + electoral_system::{ + AuthorityVoteOf, ConsensusVotes, ElectionReadAccess, ElectionWriteAccess, ElectoralSystem, + ElectoralWriteAccess, VotePropertiesOf, + }, + electoral_systems::block_height_tracking::RangeOfBlockWitnessRanges, + vote_storage::{self, VoteStorage}, + CorruptStorageError, ElectionIdentifier, +}; +use cf_chains::{btc::BlockNumber, witness_period::BlockWitnessRange}; +use cf_utilities::success_threshold_from_share_count; +use codec::{Decode, Encode}; +use frame_support::{ + ensure, + pallet_prelude::{MaybeSerializeDeserialize, Member}, + sp_runtime::traits::{AtLeast32BitUnsigned, One, Saturating}, + Parameter, +}; +use itertools::Itertools; +use scale_info::TypeInfo; +use serde::{Deserialize, Serialize}; +use sp_std::{ + collections::{btree_map::BTreeMap, vec_deque::VecDeque}, + vec::Vec, +}; + +use super::{state_machine::Validate, BlockHeightTrait, ChainProgress}; + +#[derive( + Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize, Ord, PartialOrd, +)] +pub struct Header { + pub block_height: BlockNumber, + pub hash: BlockHash, + pub parent_hash: BlockHash, +} + +#[derive( + Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize, Ord, PartialOrd, +)] +pub struct MergeInfo { + pub removed: VecDeque>, + pub added: VecDeque>, +} + +impl MergeInfo { + pub fn into_chain_progress(&self) -> Option> { + if let (Some(first_added), Some(last_added)) = (self.added.front(), self.added.back()) { + if let (Some(first_removed), Some(last_removed)) = + (self.removed.front(), self.removed.back()) + { + Some(ChainProgress::Reorg(first_added.block_height..=last_added.block_height)) + } else { + Some(ChainProgress::Continuous(first_added.block_height..=last_added.block_height)) + } + } else { + None + } + } + + pub fn get_added_block_heights(&self) -> Option> { + if let (Some(first), Some(last)) = (self.added.front(), self.added.back()) { + Some(first.block_height..=last.block_height) + } else { + None + } + } +} + +pub enum MergeFailure { + // If we get a new range of blocks, 
[lowest_new_block, ...], where the parent of + // `lowest_new_block` should, by block number, be `existing_wrong_parent`, but who's + // hash doesn't match with `lowest_new_block`'s parent hash. + ReorgWithUnknownRoot { new_block: Header, existing_wrong_parent: Option> }, + + // /// This means that we have requested blocks which start higher than our last highest + // block, /// should not happen if everything goes well. + // MissingBlocks { range: Range}, + InternalError(&'static str), +} + +pub fn extract_common_prefix(a: &mut VecDeque, b: &mut VecDeque) -> VecDeque { + let mut prefix = VecDeque::new(); + + while a.front().is_some() && (a.front() == b.front()) { + prefix.push_back(a.pop_front().unwrap()); + b.pop_front(); + } + + prefix +} + +pub fn trim_to_length(items: &mut VecDeque, target_length: usize) -> VecDeque { + let mut result = VecDeque::new(); + while items.len() > target_length { + if let Some(front) = items.pop_front() { + result.push_back(front); + } + } + result +} + +pub fn head_and_tail(mut items: &VecDeque) -> Option<(A, VecDeque)> { + let items = items.clone(); + items.clone().pop_front().map(|head| (head, items)) +} + +#[derive(Debug)] +pub enum VoteValidationError { + BlockHeightsNotContinuous, + ParentHashMismatch, + EmptyVote, +} + +#[derive( + Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize, Ord, PartialOrd, +)] +/// Invariant: +/// This should always be a continuous chain of block headers +pub struct ChainBlocks { + pub headers: VecDeque>, +} + +impl ChainBlocks { + pub fn current_state_as_no_chain_progress(&self) -> ChainProgress { + if let Some(last) = self.headers.back() { + ChainProgress::None(last.block_height) + } else { + ChainProgress::WaitingForFirstConsensus + } + } + + pub fn first_height(&self) -> Option { + self.headers.front().map(|h| h.block_height) + } +} + +impl Validate for ChainBlocks +where + H: PartialEq + Clone, + N: PartialEq + Ord + Copy + BlockHeightTrait, +{ + type Error = VoteValidationError; + + fn is_valid(&self) -> Result<(), Self::Error> { + let mut pairs = self.headers.iter().zip(self.headers.iter().skip(1)); + + if !pairs.clone().all(|(a, b)| N::forward(a.block_height, 1) == b.block_height) { + Err(VoteValidationError::BlockHeightsNotContinuous) + } else if !pairs.all(|(a, b)| a.hash == b.parent_hash) { + Err(VoteValidationError::ParentHashMismatch) + } else { + Ok(()) + } + } +} + +pub fn validate_vote_and_height( + next_height: N, + other: &VecDeque>, +) -> Result<(), VoteValidationError> +where + N: Ord + Copy + BlockHeightTrait, +{ + // a vote has to be nonempty + if other.len() == 0 { + return Err(VoteValidationError::EmptyVote) + } + + // a vote has to start with the next block we expect + if other.front().unwrap().block_height != next_height { + return Err(VoteValidationError::BlockHeightsNotContinuous) + } + + // a vote has to be continous + ChainBlocks { headers: other.clone() }.is_valid() // validate_continous_headers(other) +} + +pub enum ChainBlocksMergeResult { + Extended { new_highest: N }, + FailedMissing { range: Range }, +} + +impl ChainBlocks { + // + // We have the following assumptions: + // + // Assumptions: + // 1. `other`: a. is well-formed (contains incrementing heights) b. is nonempty + // 2. `self`: a. is well-formed (contains incrementing heights) + // 3. 
one of the following cases holds + // - case 1: `other` starts exactly after `self` ends OR self is `Default::default()` + // - case 2: (`self` and `other` start at the same block) AND (self is nonempty) + // + pub fn merge( + &mut self, + other: VecDeque>, + ) -> Result, MergeFailure> { + // assumption (1b) + let other_head = other + .front() + .ok_or(MergeFailure::InternalError("expected other to not be empty!".into()))?; + + let self_next_height = N::forward(self.headers.back().unwrap().block_height, 1); + + if self_next_height == other_head.block_height { + // this is "assumption (3): case 1" + // + // This means that our new blocks start exactly after the ones we already have, + // so we have to append them to our existing ones. And make sure that the hash/parent + // hashes match. + + if match self.headers.back() { + None => true, + Some(h) => other_head.parent_hash == h.hash, + } { + // self.next_height = other.back().unwrap().block_height + 1u32.into(); + self.headers.append(&mut other.clone()); + Ok(MergeInfo { removed: VecDeque::new(), added: other }) + } else { + Err(MergeFailure::ReorgWithUnknownRoot { + new_block: other_head.clone(), + existing_wrong_parent: self.headers.back().cloned(), + }) + } + } else { + // this is "assumption (3): case 2" + + let self_head = self + .headers + .front() + .ok_or(MergeFailure::InternalError( + "case 2: expected self to not be empty!".into(), + ))? + .clone(); + + if self_head.block_height == other_head.block_height { + // extract common prefix of headers + let mut self_headers = self.headers.clone(); + let mut other_headers = other.clone(); + let common_headers = extract_common_prefix(&mut self_headers, &mut other_headers); + + // set headers to `common_headers` + `other_headers` + self.headers = common_headers; + self.headers.append(&mut other_headers.clone()); + // self.next_height = self_head.block_height + (self.headers.len() as u32).into(); + + Ok(MergeInfo { removed: self_headers, added: other_headers }) + } else { + Err(MergeFailure::InternalError("expected either case 1 or case 2 to hold!".into())) + } + } + } +} diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/state_machine.rs b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/state_machine.rs new file mode 100644 index 0000000000..4d23db26a7 --- /dev/null +++ b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/state_machine.rs @@ -0,0 +1,137 @@ +use crate::electoral_system::ElectoralSystem; + +#[cfg(test)] +use proptest::prelude::{BoxedStrategy, Just, Strategy}; +#[cfg(test)] +use proptest::test_runner::TestRunner; + +/// A type which has an associated index type. +/// This effectively models dependent sum types. +pub trait Indexed { + type Index; + fn has_index(&self, base: &Self::Index) -> bool; +} + +pub type IndexOf = ::Index; + +/// A type which can be validated. +pub trait Validate { + type Error: sp_std::fmt::Debug; + fn is_valid(&self) -> Result<(), Self::Error>; +} + +/// A trait for implementing state machines, in particular used for simple electoral systems. +/// The model currently only supports electoral systems with a single ongoing election at any given +/// time. (Extending it to multiple ongoing elections is WIP.) +/// +/// An electoral system is essentialy a state machine: it keeps track of an internal state, +/// processes votes as input, and produces a result in every `on_finalize` call. 
+///
+/// Thus the basic structure is that we have three associated types:
+/// - `State`
+/// - `Input`
+/// - `Output`
+/// and a function `step(&mut State, Input) -> Output`.
+///
+/// ## Mapping to elections
+/// The `Input` type is the type of votes. Election properties are given by the associated type
+/// `Input::Index`, where the function `has_index(vote: &Input, election_properties: &Input::Index)
+/// -> bool` is used to determine whether a given vote is valid for given election properties.
+///
+/// The definition of the state machine requires a function `input_index(&State) -> Input::Index`
+/// which describes, for a given state, which index we expect the next input to have (in other words,
+/// for which election properties we want to get a vote next). This means that creation of elections
+/// is handled indirectly: the state machine merely has to transition into a state with the correct
+/// `input_index`, and an election with these election properties is then created automatically.
+///
+/// ## Idle results
+/// When there is no consensus, the electoral system still has to return something in its `on_finalize`
+/// function. This value is provided by the `get(&State) -> DisplayState` function. The associated
+/// `DisplayState` type is an arbitrary "summary" of the current state, meant for consumers of the
+/// `on_finalize` result.
+///
+/// Note: it might be that this functionality is going to be modelled differently in the future.
+///
+/// ## Validation
+/// In the case of the BHW, both the `Input` and the `State` contain sequences of headers
+/// which need to have sequential block heights and matching hashes. In order to provide a coherent
+/// interface for checking these, we require these associated types to implement the trait
+/// `Validate`. We also require `Validate` on the `Output` type.
+///
+/// ## Testing
+/// The state machine trait provides a convenience method `test(states, inputs)` for testing a given
+/// state machine. Here `states` and `inputs` are strategies for generating states and inputs, and
+/// the function runs the `step` function on randomly generated input values, while ensuring that
+/// everything is valid.
+pub trait StateMachine: 'static {
+    type Input: Validate + Indexed;
+    type Output: Validate;
+    type State: Validate;
+    type DisplayState;
+
+    /// To every state, this function associates an input index which
+    /// describes what kind of input we want to receive next.
+    fn input_index(s: &Self::State) -> IndexOf;
+
+    /// The state transition function, it takes the state, and an input,
+    /// and assumes that both state and index are valid, and furthermore
+    /// that the input has the index `input_index(s)`.
+    fn step(s: &mut Self::State, i: Self::Input) -> Self::Output;
+
+    /// Project the current state to a "DisplayState" value.
+    fn get(s: &Self::State) -> Self::DisplayState;
+
+    /// Contains an optional specification of the `step` function.
+    /// Takes a state, input and next state as arguments. During testing it is verified
+    /// that the resulting state after the step function always fulfills this specification.
+    #[cfg(test)]
+    fn step_specification(before: &Self::State, input: &Self::Input, after: &Self::State) -> bool {
+        true
+    }
+
+    /// Given strategies `states` and `inputs` for generating arbitrary, valid values, runs the step
+    /// function and ensures that its result is always valid and additionally fulfills the
+    /// `step_specification`.
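// A minimal, self-contained illustration of the `StateMachine` contract documented
// above: a state, an expected input index derived from that state, and a `step`
// function that consumes an input with exactly that index. The types below are toy
// stand-ins, not the crate's `Indexed`/`Validate`/`StateMachine` traits.
trait ToyIndexed {
    type Index: PartialEq;
    fn has_index(&self, base: &Self::Index) -> bool;
}

// A vote carrying the sequence number it was produced for, plus a payload.
#[derive(Clone, Debug)]
struct SeqInput {
    seq: u64,
    payload: &'static str,
}

impl ToyIndexed for SeqInput {
    type Index = u64;
    fn has_index(&self, base: &Self::Index) -> bool {
        self.seq == *base
    }
}

// State: the last sequence number we have processed.
#[derive(Default, Debug)]
struct SeqState {
    last_seen: u64,
}

// `input_index` tells the surrounding system which election to open next;
// `step` consumes an input with exactly that index and advances the state.
fn input_index(s: &SeqState) -> u64 {
    s.last_seen + 1
}

fn step(s: &mut SeqState, input: SeqInput) -> Result<&'static str, &'static str> {
    if !input.has_index(&input_index(s)) {
        return Err("input does not match the expected index");
    }
    s.last_seen = input.seq;
    Ok(input.payload)
}

fn main() {
    let mut state = SeqState::default();
    assert_eq!(input_index(&state), 1); // an election for index 1 would be created
    assert_eq!(step(&mut state, SeqInput { seq: 1, payload: "a" }), Ok("a"));
    assert_eq!(input_index(&state), 2); // the next election follows from the new state
    assert!(step(&mut state, SeqInput { seq: 5, payload: "b" }).is_err());
}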
+ #[cfg(test)] + fn test( + states: impl Strategy, + inputs: impl Fn(IndexOf) -> BoxedStrategy, + ) where + Self::State: sp_std::fmt::Debug + Clone, + Self::Input: sp_std::fmt::Debug + Clone, + { + let mut runner = TestRunner::default(); + + runner + .run( + &(states.prop_flat_map(|state| { + (Just(state.clone()), inputs(Self::input_index(&state))) + })), + |(mut state, input)| { + // ensure that inputs are well formed + assert!(state.is_valid().is_ok(), "input state not valid"); + assert!(input.is_valid().is_ok(), "input not valid"); + assert!(input.has_index(&Self::input_index(&state)), "input has wrong index"); + + // backup state + let prev_state = state.clone(); + + // run step function and ensure that output is valid + assert!( + Self::step(&mut state, input.clone()).is_valid().is_ok(), + "step function failed" + ); + + // ensure that state is still well formed + assert!(state.is_valid().is_ok(), "state after step function is not valid"); + assert!( + Self::step_specification(&prev_state, &input, &state), + "step function does not fulfill spec" + ); + + Ok(()) + }, + ) + .unwrap(); + } +} diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/state_machine_es.rs b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/state_machine_es.rs new file mode 100644 index 0000000000..1dfc3b1f79 --- /dev/null +++ b/state-chain/pallets/cf-elections/src/electoral_systems/block_height_tracking/state_machine_es.rs @@ -0,0 +1,174 @@ +use cf_utilities::success_threshold_from_share_count; +use frame_support::{ + pallet_prelude::{MaybeSerializeDeserialize, Member}, + Parameter, +}; +use itertools::{Either, Itertools}; +use sp_std::vec::Vec; + +use crate::{ + electoral_system::{ElectionWriteAccess, ElectoralSystem}, + vote_storage, CorruptStorageError, +}; + +use super::{ + consensus::{ConsensusMechanism, Threshold}, + state_machine::{Indexed, StateMachine, Validate}, +}; + +pub trait IntoResult { + type Ok; + type Err; + + fn into_result(self) -> Result; +} + +impl IntoResult for Result { + type Ok = A; + type Err = B; + fn into_result(self) -> Result { + self + } +} + +/// Creates an Electoral System from a given state machine +/// and a given consensus mechanism. 
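// An illustrative, self-contained walk-through of the two `ChainBlocks::merge` cases
// described in `primitives.rs` above: appending headers that start right after our tip
// (with a matching parent hash), and splicing at a common prefix when the new headers
// start at the same height (a reorg). The types and the `merge` helper here are
// simplified assumptions for the example, not the crate's implementation.
use std::collections::VecDeque;

#[derive(Clone, Debug, PartialEq)]
struct Hdr {
    height: u64,
    hash: u8,
    parent: u8,
}

// Returns (removed, added) on success, mirroring the shape of `MergeInfo`.
fn merge(
    ours: &mut VecDeque<Hdr>,
    mut new: VecDeque<Hdr>,
) -> Result<(Vec<Hdr>, Vec<Hdr>), &'static str> {
    let new_head = new.front().cloned().ok_or("new headers must be non-empty")?;
    let our_tip = ours.back().cloned().ok_or("ours must be non-empty")?;

    if new_head.height == our_tip.height + 1 {
        // Case 1: continuous extension; the parent hash must match our tip.
        if new_head.parent != our_tip.hash {
            return Err("reorg with unknown root: parent hash does not match our tip");
        }
        let added: Vec<Hdr> = new.iter().cloned().collect();
        ours.append(&mut new);
        Ok((Vec::new(), added))
    } else if new_head.height == ours.front().unwrap().height {
        // Case 2: same starting height; keep the common prefix, adopt the new suffix.
        let mut kept = VecDeque::new();
        while ours.front().is_some() && ours.front() == new.front() {
            kept.push_back(ours.pop_front().unwrap());
            new.pop_front();
        }
        let removed: Vec<Hdr> = ours.iter().cloned().collect();
        let added: Vec<Hdr> = new.iter().cloned().collect();
        kept.append(&mut new);
        *ours = kept;
        Ok((removed, added))
    } else {
        Err("expected the new headers to either extend or overlap our chain")
    }
}

fn main() {
    let h = |height, hash, parent| Hdr { height, hash, parent };
    let mut chain: VecDeque<Hdr> = [h(1, 0xa1, 0xa0), h(2, 0xa2, 0xa1)].into_iter().collect();

    // Continuous extension: nothing removed, block 3 added.
    let (removed, added) = merge(&mut chain, [h(3, 0xa3, 0xa2)].into_iter().collect()).unwrap();
    assert!(removed.is_empty() && added.len() == 1);

    // Reorg replacing block 3: blocks 1 and 2 are the shared prefix.
    let (removed, added) = merge(
        &mut chain,
        [h(1, 0xa1, 0xa0), h(2, 0xa2, 0xa1), h(3, 0xb3, 0xa2)].into_iter().collect(),
    )
    .unwrap();
    assert_eq!((removed.len(), added.len()), (1, 1));
}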
+pub struct DsmElectoralSystem< + Type, + ValidatorId: Member + Parameter + Ord + MaybeSerializeDeserialize, + Settings, + Consensus, +> { + _phantom: core::marker::PhantomData<(Type, ValidatorId, Settings, Consensus)>, +} + +impl ElectoralSystem + for DsmElectoralSystem +where + SM: StateMachine, + ValidatorId: Member + Parameter + Ord + MaybeSerializeDeserialize, + Settings: Member + Parameter + MaybeSerializeDeserialize + Eq, + C: ConsensusMechanism< + Vote = SM::Input, + Result = SM::Input, + Settings = (Threshold, ::Index), + > + 'static, + ::Index: Clone + Member + Parameter + sp_std::fmt::Debug, + SM::State: MaybeSerializeDeserialize + Member + Parameter + Eq + sp_std::fmt::Debug, + SM::Input: Indexed + Clone + Member + Parameter, + SM::Output: IntoResult, + ::Err: sp_std::fmt::Debug, +{ + type ValidatorId = ValidatorId; + type ElectoralUnsynchronisedState = SM::State; + type ElectoralUnsynchronisedStateMapKey = (); + type ElectoralUnsynchronisedStateMapValue = (); + + type ElectoralUnsynchronisedSettings = (); + type ElectoralSettings = Settings; + type ElectionIdentifierExtra = (); + type ElectionProperties = ::Index; + type ElectionState = (); + type Vote = vote_storage::bitmap::Bitmap; + type Consensus = SM::Input; + type OnFinalizeContext = (); + + // we return either the state if no input was processed, + // or the output produced by the state machine + type OnFinalizeReturn = Either::Ok>; + + fn generate_vote_properties( + _election_identifier: crate::electoral_system::ElectionIdentifierOf, + _previous_vote: Option<( + crate::electoral_system::VotePropertiesOf, + crate::electoral_system::AuthorityVoteOf, + )>, + _vote: &::PartialVote, + ) -> Result, crate::CorruptStorageError> { + Ok(()) + } + + fn on_finalize< + ElectoralAccess: crate::electoral_system::ElectoralWriteAccess + 'static, + >( + election_identifiers: Vec>, + context: &Self::OnFinalizeContext, + ) -> Result { + if let Some(election_identifier) = election_identifiers + .into_iter() + .at_most_one() + .map_err(|_| CorruptStorageError::new())? + { + let election_access = ElectoralAccess::election_mut(election_identifier); + + // if we have consensus, we can pass it to the state machine's step function + if let Some(input) = election_access.check_consensus()?.has_consensus() { + let (next_input_index, output) = + ElectoralAccess::mutate_unsynchronised_state(|state| { + // call the state machine + let output = SM::step(state, input); + + // if we have been successful, get the input index of the new state + match output.into_result() { + Ok(output) => Ok((SM::input_index(state), output)), + Err(err) => { + log::error!("Electoral system moved into a bad state: {err:?}"); + Err(CorruptStorageError::new()) + }, + } + })?; + + // delete the old election and create a new one with the new input index + election_access.delete(); + ElectoralAccess::new_election((), next_input_index, ())?; + + Ok(Either::Right(output)) + } else { + // if there is no consensus, simply get the current `DisplayState` of the SM. + + log::info!("No consensus could be reached!"); + Ok(Either::Left(SM::get(&ElectoralAccess::unsynchronised_state()?))) + } + } else { + // if there is no election going on, we create an election corresponding to the + // current state. 
+
+            log::info!("Starting new election with value because no elections exist");
+
+            let state = ElectoralAccess::unsynchronised_state()?;
+
+            ElectoralAccess::new_election((), SM::input_index(&state), ())?;
+            Ok(Either::Left(SM::get(&state)))
+        }
+    }
+
+    fn check_consensus<
+        ElectionAccess: crate::electoral_system::ElectionReadAccess,
+    >(
+        election_access: &ElectionAccess,
+        // This is the consensus as of the last time the consensus was checked. Note this is *NOT*
+        // the "last" consensus, i.e. this can be `None` even if on some previous check we had
+        // consensus, but it was subsequently lost.
+        _previous_consensus: Option<&Self::Consensus>,
+        consensus_votes: crate::electoral_system::ConsensusVotes,
+    ) -> Result, crate::CorruptStorageError> {
+        let properties = election_access.properties()?;
+        let mut consensus = C::default();
+        let num_authorities = consensus_votes.num_authorities();
+
+        for vote in consensus_votes.active_votes() {
+            // insert vote if it is valid for the given properties
+            if vote.is_valid().is_ok() && vote.has_index(&properties) {
+                log::info!("inserting vote {vote:?}");
+                consensus.insert_vote(vote);
+            } else {
+                log::warn!("Received invalid vote: expected base {properties:?} but vote was not in fiber ({:?})", vote);
+            }
+        }
+
+        Ok(consensus.check_consensus(&(
+            Threshold { threshold: success_threshold_from_share_count(num_authorities) },
+            properties,
+        )))
+    }
+}
diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/block_witnesser.rs b/state-chain/pallets/cf-elections/src/electoral_systems/block_witnesser.rs
new file mode 100644
index 0000000000..52dcb206f6
--- /dev/null
+++ b/state-chain/pallets/cf-elections/src/electoral_systems/block_witnesser.rs
@@ -0,0 +1,388 @@
+use core::{cmp::min, ops::RangeInclusive};
+
+use crate::{
+    electoral_system::{
+        AuthorityVoteOf, ConsensusVotes, ElectionIdentifierOf, ElectionReadAccess,
+        ElectionWriteAccess, ElectoralSystem, ElectoralWriteAccess, VotePropertiesOf,
+    },
+    vote_storage::{self, VoteStorage},
+    CorruptStorageError, SharedDataHash,
+};
+use cf_chains::witness_period::BlockWitnessRange;
+use cf_utilities::success_threshold_from_share_count;
+use codec::{Decode, Encode};
+use frame_support::{
+    ensure,
+    pallet_prelude::{MaybeSerializeDeserialize, Member},
+    sp_runtime::Saturating,
+    Parameter,
+};
+use scale_info::TypeInfo;
+use serde::{Deserialize, Serialize};
+use sp_core::bounded::alloc::collections::BTreeSet;
+use sp_std::{collections::btree_map::BTreeMap, vec::Vec};
+
+use super::block_height_tracking::OldChainProgress as ChainProgress;
+
+// Rather than push processing outside, we could provide an evaluation function that is called
+// to determine whether to process or not. This keeps things encapsulated a little better.
+
+// We create an election with all the channels for a particular block. Then, once everyone has voted
+// and there is nothing left to witness for that election (i.e. for that block), it closes the election,
+// so we don't duplicate that much state at all... unless on recovery.
+
+// How do we create elections for channels that only existed at a past state? - We manage channel
+// lifetimes in the ES. Then we don't prematurely expire when we're in safe mode. The channels
+// themselves can live outside the ES, but their lifetimes are managed from within the ES. We just
+// need to know the id to look up the channel and its lifetime (opened_at, closed_at).
+
+// If there are no channels, we don't have any elections.
+
+// safety margin???
+// Double witnessing???
- should be handled by the downstream. E.g. dispatching a second boost to +// the ingress egress should be handled by ingress egress, same way it is now. + +// NB: We only worry about safety margins in the on-consensus hook. Chain tracking pushes the latest +// block number, potentially with gaps which we fill. The safety is determined by the dispatching +// action, this is how we can achieve dynamic, amount based safety margins. +pub struct BlockWitnesser +{ + _phantom: core::marker::PhantomData<( + Chain, + BlockData, + Properties, + ValidatorId, + OnConsensus, + ElectionGenerator, + )>, +} + +pub trait ProcessBlockData { + /// Process the block data and return the unprocessed data. It's possible to have received data + /// for the same block twice, in the case of a reorg. It is up to the implementor of this trait + /// to handle this case. + fn process_block_data( + chain_block_number: ChainBlockNumber, + // Any data associated with any blocks *before* + // this block has been processed, and can therefore be safely removed. + // This is a min(earliest open election, earliest unprocessed block data). Since any blocks + // before this have already been processed. + earliest_unprocessed_block: ChainBlockNumber, + block_data: Vec<(ChainBlockNumber, BlockData)>, + ) -> Vec<(ChainBlockNumber, BlockData)>; +} + +/// Allows external/runtime/implementation to return the properties that the election should use. +/// This means each instantiation of the block witnesser can control how the properties are +/// generated, and allows for easier testing of this hook externally vs. actually creating the new +/// election inside this hook. +pub trait BlockElectionPropertiesGenerator { + fn generate_election_properties(root_block_to_witness: ChainBlockNumber) -> Properties; +} + +pub type ElectionCount = u16; + +#[derive( + Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize, Default, +)] +pub struct BlockWitnesserSettings { + // We don't want to start too many elections at once, as this could overload the engines. + // e.g. If we entered safe mode for a long time and then missed 1000 blocks, without this, we + // would start 1000 elections at the same time. Instead, we throttle the recovery. + pub max_concurrent_elections: ElectionCount, +} + +#[derive( + Debug, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Deserialize, Serialize, Default, +)] +pub struct BlockWitnesserState { + // The last block where we know that we have processed everything from.... + // what about a reorg?????? + pub last_block_election_emitted_for: ChainBlockNumber, + + // TODO: Ensure that even empty data is passed through here with a test. + // The block roots (of a block range) that we received non empty block data for, but still + // requires processing. + // NOTE: It is possible for block data to arrive and then be partially processed. In this case, + // the block will still be here until there is no more block data for this block root to + // process. + pub unprocessed_data: Vec<(ChainBlockNumber, BlockData)>, + + // What block numbers do we have open elections for? 
+ pub elections_open_for: BTreeSet, +} + +impl< + Chain: cf_chains::Chain, + BlockData: Member + Parameter + Eq + MaybeSerializeDeserialize, + Properties: Parameter + Member, + ValidatorId: Member + Parameter + Ord + MaybeSerializeDeserialize, + BlockDataProcessor: ProcessBlockData<::ChainBlockNumber, BlockData> + 'static, + ElectionGenerator: BlockElectionPropertiesGenerator< + ::ChainBlockNumber, + Properties, + > + 'static, + > ElectoralSystem + for BlockWitnesser< + Chain, + BlockData, + Properties, + ValidatorId, + BlockDataProcessor, + ElectionGenerator, + > +{ + type ValidatorId = ValidatorId; + // Store the last processed block number, number of, and the number of open elections. + type ElectoralUnsynchronisedState = + BlockWitnesserState<::ChainBlockNumber, BlockData>; + + // We store all the unprocessed block data here, including the most recently added block data, + // so it can be used in the OnBlockConsensus + type ElectoralUnsynchronisedStateMapKey = (); + type ElectoralUnsynchronisedStateMapValue = (); + + type ElectoralUnsynchronisedSettings = BlockWitnesserSettings; + type ElectoralSettings = (); + type ElectionIdentifierExtra = (); + // The first item is the block number we wish to witness, the second is something else about + // that block we want to witness. e.g. all the deposit channel addresses that are active at + // that block. + type ElectionProperties = + (BlockWitnessRange<::ChainBlockNumber>, Properties); + type ElectionState = (); + type Vote = vote_storage::bitmap::Bitmap; + type Consensus = BlockData; + + // TODO: Use a specialised range type that accounts for the witness period? + type OnFinalizeContext = ChainProgress<::ChainBlockNumber>; + type OnFinalizeReturn = (); + + fn generate_vote_properties( + _election_identifier: ElectionIdentifierOf, + _previous_vote: Option<(VotePropertiesOf, AuthorityVoteOf)>, + _vote: &::PartialVote, + ) -> Result, CorruptStorageError> { + Ok(()) + } + + fn is_vote_desired>( + _election_access: &ElectionAccess, + _current_vote: Option<(VotePropertiesOf, AuthorityVoteOf)>, + ) -> Result { + Ok(true) + } + + fn on_finalize + 'static>( + election_identifiers: Vec>, + chain_progress: &Self::OnFinalizeContext, + ) -> Result { + let BlockWitnesserState { + mut last_block_election_emitted_for, + mut elections_open_for, + mut unprocessed_data, + } = ElectoralAccess::unsynchronised_state()?; + + let mut remaining_election_identifiers = election_identifiers.clone(); + + let last_seen_root = match chain_progress { + ChainProgress::WaitingForFirstConsensus => { + log::info!("Waiting for first consensus"); + return Ok(()) + }, + ChainProgress::Reorg(reorg_range) => { + // We ensure that a reorg always includes the block that we last emitted an election + // for. Implying that new forks are at least as long as the previous chain we had + // knowledge of. + ensure!( + reorg_range.witness_to_root() >= last_block_election_emitted_for, + CorruptStorageError::new() + ); + log::info!("Got a reorg: {:?}", reorg_range); + // println!("Got a reorg: {:?}", reorg_range); + // Delete any elections that are ongoing for any blocks in the reorg range. 
+ + let block_witness_ranges = + reorg_range.block_witness_ranges().map_err(|()| CorruptStorageError::new())?; + for (i, election_identifier) in election_identifiers.into_iter().enumerate() { + let election = ElectoralAccess::election_mut(election_identifier); + let (block_witness_range, _) = election.properties()?; + + if block_witness_ranges.contains(&block_witness_range) { + log::info!( + "Deleting election with root: {:?} due to reorg", + block_witness_range.root() + ); + election.delete(); + elections_open_for.remove(&block_witness_range.root()); + remaining_election_identifiers.remove(i); + } + } + + // TODO: Limit this to maximum number of elections we can start at once (and write a + // test for this case where the range of the reorg is > + // max_concurrent_elections). TODO: Wrap with safe mode, no new elections. + for range in block_witness_ranges.clone() { + let root = *range.root(); + log::info!("New election for root: {:?}", root); + // println!("New election for root in reorg: {:?}", root); + ElectoralAccess::new_election( + (), + (range, ElectionGenerator::generate_election_properties(root)), + (), + )?; + last_block_election_emitted_for = root; + elections_open_for.insert(root); + } + + // NB: We do not clear any of the unprocessed data here. This is because we need + // to prevent double dispatches. By keeping the state, if we have a reorg we + // can check against the state in the process_block_data hook to ensure we + // don't double dispatch. + reorg_range.witness_to_root() + }, + ChainProgress::None(last_block_root_seen) => { + log::info!("No progress, last block root seen: {:?}", last_block_root_seen); + *last_block_root_seen + }, + ChainProgress::Continuous(witness_range) => { + if last_block_election_emitted_for.into() == 0u64 { + // The first time we see a block, we want to initialise the + // last_block_election_emitted_for, so we only emit an election for the most + // recent. TODO: Make this it's own enum variant `FirstConsenus`? + last_block_election_emitted_for = + witness_range.witness_from_root.saturating_sub(Chain::WITNESS_PERIOD); + return Ok(()) + } + log::info!("Continuous progress: {:?}", witness_range); + witness_range.witness_to_root() + }, + }; + + ensure!(Chain::is_block_witness_root(last_seen_root), { + log::error!("Last seen block root is not a block witness root"); + CorruptStorageError::new() + }); + + // Start any new elections if we can. + // TODO: Wrap in safe mode + let settings = ElectoralAccess::unsynchronised_settings()?; + + // We always want to check with remaining elections we can resolve, note the ones we just + // initiated won't be included here, which is intention, they can't have come to consensus + // yet. 
+ for election_identifier in remaining_election_identifiers { + let election_access = ElectoralAccess::election_mut(election_identifier); + if let Some(block_data) = election_access.check_consensus()?.has_consensus() { + log::info!("Got consensus on block data: {:?}", block_data); + // println!("Got consensus on block data: {:?}", block_data); + + let (block_witness_range, _extra_properties) = election_access.properties()?; + + election_access.delete(); + + elections_open_for.remove(block_witness_range.root()); + unprocessed_data.push((*block_witness_range.root(), block_data)); + } + } + + log::info!("Last block root seen: {:?}", last_seen_root); + log::info!("Last block election emitted for: {:?}", last_block_election_emitted_for); + log::info!("Open elections: {:?}", elections_open_for); + log::info!("Max concurrent elections: {:?}", settings.max_concurrent_elections); + + // println!("Last block root seen: {:?}", last_seen_root); + // println!("Last block election emitted for: {:?}", last_block_election_emitted_for); + // println!("Open elections: {:?}", elections_open_for); + + for range_root in (last_block_election_emitted_for.saturating_add(Chain::WITNESS_PERIOD)..= + last_seen_root) + .step_by(Into::::into(Chain::WITNESS_PERIOD) as usize) + .take( + (settings.max_concurrent_elections as usize) + .saturating_sub(elections_open_for.len()), + ) { + log::info!("Starting new election for root: {:?}", range_root); + // println!("Starting new election for root: {:?}", range_root); + ElectoralAccess::new_election( + (), + ( + BlockWitnessRange::try_new(range_root, Chain::WITNESS_PERIOD).map_err(|e| { + log::error!("Failed to create block witness range {e:?}"); + CorruptStorageError::new() + })?, + ElectionGenerator::generate_election_properties(range_root), + ), + (), + )?; + last_block_election_emitted_for = range_root; + elections_open_for.insert(range_root); + } + + let earliest_open_election = elections_open_for.iter().next().cloned(); + let earliest = unprocessed_data.iter().map(|(block_number, _)| block_number).min().cloned(); + let earliest_unprocessed_block = min( + // If there are no elections open and no data, then the last time we emitted election + // is the last thing we processed. 
+ earliest_open_election.unwrap_or(last_block_election_emitted_for), + earliest.unwrap_or(last_block_election_emitted_for), + ); + + debug_assert!(earliest_unprocessed_block <= last_block_election_emitted_for); + + unprocessed_data = BlockDataProcessor::process_block_data( + last_seen_root, + earliest_unprocessed_block, + unprocessed_data, + ); + + log::info!( + "Processed block data up to: {:?}, earliest unprocessed block: {:?}", + last_seen_root, + earliest_unprocessed_block + ); + + debug_assert!( + ::is_block_witness_root(last_block_election_emitted_for), + "We only store this if it passes the original block witness root check" + ); + + ElectoralAccess::set_unsynchronised_state(BlockWitnesserState { + elections_open_for, + last_block_election_emitted_for, + unprocessed_data, + })?; + + Ok(()) + } + + fn check_consensus>( + _election_access: &ElectionAccess, + _previous_consensus: Option<&Self::Consensus>, + consensus_votes: ConsensusVotes, + ) -> Result, CorruptStorageError> { + let num_authorities = consensus_votes.num_authorities(); + let active_votes = consensus_votes.active_votes(); + let num_active_votes = active_votes.len() as u32; + let success_threshold = success_threshold_from_share_count(num_authorities); + Ok(if num_active_votes >= success_threshold { + let mut hash_to_block_data = BTreeMap::::new(); + + let mut counts = BTreeMap::::new(); + for vote in active_votes { + let vote_hash = SharedDataHash::of(&vote); + hash_to_block_data.insert(vote_hash, vote.clone()); + counts.entry(vote_hash).and_modify(|count| *count += 1).or_insert(1); + } + counts.iter().find_map(|(vote, count)| { + if *count >= success_threshold { + Some(hash_to_block_data.get(vote).expect("We must insert it above").clone()) + } else { + None + } + }) + } else { + None + }) + } +} diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/composite.rs b/state-chain/pallets/cf-elections/src/electoral_systems/composite.rs index 1c0f49e18f..77e8db6053 100644 --- a/state-chain/pallets/cf-elections/src/electoral_systems/composite.rs +++ b/state-chain/pallets/cf-elections/src/electoral_systems/composite.rs @@ -146,11 +146,15 @@ macro_rules! generate_electoral_system_tuple_impls { Ok(( match properties { CompositeVoteProperties::$electoral_system(properties) => properties, + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => return Err(CorruptStorageError::new()), }, match vote { AuthorityVote::PartialVote(CompositePartialVote::$electoral_system(partial_vote)) => AuthorityVote::PartialVote(partial_vote), AuthorityVote::Vote(CompositeVote::$electoral_system(vote)) => AuthorityVote::Vote(vote), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => return Err(CorruptStorageError::new()), }, )) @@ -191,6 +195,8 @@ macro_rules! generate_electoral_system_tuple_impls { ) }, )* + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => true, } } @@ -211,11 +217,15 @@ macro_rules! 
generate_electoral_system_tuple_impls { Ok(( match previous_properties { CompositeVoteProperties::$electoral_system(previous_properties) => previous_properties, + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => return Err(CorruptStorageError::new()), }, match previous_vote { AuthorityVote::PartialVote(CompositePartialVote::$electoral_system(partial_vote)) => AuthorityVote::PartialVote(partial_vote), AuthorityVote::Vote(CompositeVote::$electoral_system(vote)) => AuthorityVote::Vote(vote), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => return Err(CorruptStorageError::new()), }, )) @@ -223,6 +233,8 @@ macro_rules! generate_electoral_system_tuple_impls { partial_vote, ).map(CompositeVoteProperties::$electoral_system) },)* + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()), } } @@ -249,6 +261,8 @@ macro_rules! generate_electoral_system_tuple_impls { previous_consensus.map(|previous_consensus| { match previous_consensus { CompositeConsensus::$electoral_system(previous_consensus) => Ok(previous_consensus), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()), } }).transpose()?, @@ -263,6 +277,8 @@ macro_rules! generate_electoral_system_tuple_impls { vote: Some((properties, vote)), validator_id }), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()), } } else { @@ -318,6 +334,8 @@ macro_rules! generate_electoral_system_tuple_impls { CompositeElectionProperties::$current(properties) => { Ok(properties) }, + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()) } } @@ -326,6 +344,8 @@ macro_rules! generate_electoral_system_tuple_impls { CompositeElectionState::$current(state) => { Ok(state) }, + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()) } } @@ -367,6 +387,8 @@ macro_rules! generate_electoral_system_tuple_impls { consensus_status.try_map(|consensus| { match consensus { CompositeConsensus::$current(consensus) => Ok(consensus), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()), } }) @@ -399,6 +421,8 @@ macro_rules! generate_electoral_system_tuple_impls { match StorageAccess::unsynchronised_state_map(&CompositeElectoralUnsynchronisedStateMapKey::$current(key.clone())) { Some(CompositeElectoralUnsynchronisedStateMapValue::$current(value)) => Ok(Some(value)), None => Ok(None), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()), } } @@ -459,4 +483,6 @@ macro_rules! 
generate_electoral_system_tuple_impls { }; } +generate_electoral_system_tuple_impls!(tuple_1_impls: ((A, A0))); +generate_electoral_system_tuple_impls!(tuple_2_impls: ((A, A0), (B, B0))); generate_electoral_system_tuple_impls!(tuple_7_impls: ((A, A0), (B, B0), (C, C0), (D, D0), (EE, E0), (FF, F0), (GG, G0))); diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/mocks.rs b/state-chain/pallets/cf-elections/src/electoral_systems/mocks.rs index 1f50861ba7..0eca319cc3 100644 --- a/state-chain/pallets/cf-elections/src/electoral_systems/mocks.rs +++ b/state-chain/pallets/cf-elections/src/electoral_systems/mocks.rs @@ -81,12 +81,10 @@ where Self { unsynchronised_settings, ..self } } - #[allow(dead_code)] pub fn with_electoral_settings(self, electoral_settings: ES::ElectoralSettings) -> Self { Self { electoral_settings, ..self } } - #[allow(dead_code)] pub fn with_initial_election_state( self, extra: ES::ElectionIdentifierExtra, @@ -185,6 +183,47 @@ where ) } + // TODO: factor out with above. + // Note: it's important that these expectations are executed in order, as some tests rely on + // testing that the order several elections are processed does not matter. + pub fn expect_consensus_multi( + self, + votes_and_expectations: Vec<(ConsensusVotes, Option)>, + ) -> Self { + let mut active_election_ids = self.all_election_ids().into_iter(); + + let mut next_self = self.clone(); + + for (mut consensus_votes, expected_consensus) in votes_and_expectations { + assert!(consensus_votes.num_authorities() > 0, "Cannot have zero authorities."); + + use rand::seq::SliceRandom; + consensus_votes.votes.shuffle(&mut rand::thread_rng()); + + let current_election_id = &active_election_ids + .next() + .expect("More expected elections than active.") + .clone(); + + let new_consensus = MockAccess::::election(*current_election_id) + .check_consensus(None, consensus_votes) + .unwrap(); + + // Should assert on some condition about the consensus. 
+ assert_eq!(new_consensus.clone(), expected_consensus); + + next_self = next_self.inner_force_consensus_update( + *current_election_id, + if let Some(consensus) = new_consensus { + ConsensusStatus::Gained { most_recent: None, new: consensus } + } else { + ConsensusStatus::None + }, + ); + } + self + } + pub fn only_election_id(&self) -> ElectionIdentifierOf { self.all_election_ids() .into_iter() diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/mocks/access.rs b/state-chain/pallets/cf-elections/src/electoral_systems/mocks/access.rs index 492c09a557..c6cce32433 100644 --- a/state-chain/pallets/cf-elections/src/electoral_systems/mocks/access.rs +++ b/state-chain/pallets/cf-elections/src/electoral_systems/mocks/access.rs @@ -295,7 +295,6 @@ impl MockStorageAccess { identifier: ElectionIdentifierOf, state: ES::ElectionState, ) { - println!("Setting election state for identifier: {:?}", identifier); ELECTION_STATE.with(|old_state| { let mut state_ref = old_state.borrow_mut(); state_ref.insert(identifier.unique_monotonic().encode(), state.encode()); @@ -420,6 +419,7 @@ impl MockStorageAccess { key: ES::ElectoralUnsynchronisedStateMapKey, value: Option, ) { + println!("Setting unsynced state map for key: {:?}", key); ELECTORAL_UNSYNCHRONISED_STATE_MAP.with(|old_state_map| { let mut state_map_ref = old_state_map.borrow_mut(); match value { diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/tests.rs b/state-chain/pallets/cf-elections/src/electoral_systems/tests.rs index 0d9b2519c3..9c489f5909 100644 --- a/state-chain/pallets/cf-elections/src/electoral_systems/tests.rs +++ b/state-chain/pallets/cf-elections/src/electoral_systems/tests.rs @@ -2,6 +2,7 @@ pub(crate) use super::mocks; pub(crate) use crate::register_checks; +pub mod block_witnesser; pub mod delta_based_ingress; pub mod egress_success; pub mod liveness; diff --git a/state-chain/pallets/cf-elections/src/electoral_systems/tests/block_witnesser.rs b/state-chain/pallets/cf-elections/src/electoral_systems/tests/block_witnesser.rs new file mode 100644 index 0000000000..7bbfc173ee --- /dev/null +++ b/state-chain/pallets/cf-elections/src/electoral_systems/tests/block_witnesser.rs @@ -0,0 +1,672 @@ +// 1. We come to consensus when all data is the same. +// 2. We execute hooks when coming to consensus +// 3. On finalize we start as many elections as possible within the new range, but no more than the +// maximum => skipping received blocks is fine, even ranges +// 4. First time the ES is run, we should only spawn the last election. Last procsesed will not +// exist, so we have to ensure we don't generate +// all elections until the beginning of time. +// 5. Ranges are consistent, and if we're mid range we don't emit a new election, we wait for next +// time we can election for a whole range. +// 6. State updates. When channel is opened, what happens? new election? +// Out of order consensus - when catching up this is possible. We need to ensure everything is still +// handled correctly. +// Testing with a chain with range > 1 +// State partially processed, how do we test that the state still gets processed until all the state +// is processed. 
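// An illustrative, self-contained sketch of the `ProcessBlockData` contract these tests
// exercise: the hook receives every still-unprocessed `(block, data)` pair, acts on what
// it considers safe, and returns what should be retained for a later call. The safety
// margin and types below are assumptions made for the example, not crate logic.
type BlockNumber = u64;
type Deposits = Vec<u32>;

const SAFETY_MARGIN: BlockNumber = 3;

fn process_block_data(
    chain_tip: BlockNumber,
    unprocessed: Vec<(BlockNumber, Deposits)>,
) -> Vec<(BlockNumber, Deposits)> {
    let (ready, retained): (Vec<_>, Vec<_>) = unprocessed
        .into_iter()
        .partition(|(block, _)| block.saturating_add(SAFETY_MARGIN) <= chain_tip);

    for (block, deposits) in ready {
        // A real implementation would dispatch these deposits here.
        println!("dispatching {deposits:?} witnessed at block {block}");
    }

    // Anything too recent to be considered safe is handed back and will be passed in
    // again on the next call (possibly alongside re-witnessed data after a reorg).
    retained
}

fn main() {
    let unprocessed = vec![(10, vec![1]), (14, vec![2, 3]), (15, vec![])];
    let retained = process_block_data(15, unprocessed);
    assert_eq!(retained, vec![(14, vec![2, 3]), (15, vec![])]);
}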
+ +use super::{ + mocks::{Check, TestSetup}, + register_checks, +}; +use crate::{ + electoral_system::{ConsensusVote, ConsensusVotes, ElectoralSystem}, + electoral_systems::{ + block_height_tracking::{OldChainProgress as ChainProgress, RangeOfBlockWitnessRanges}, + block_witnesser::*, + }, +}; +use cf_chains::{mocks::MockEthereum, Chain}; +use sp_std::collections::btree_set::BTreeSet; + +thread_local! { + pub static PROPERTIES_TO_RETURN: std::cell::RefCell = const { std::cell::RefCell::new(BTreeSet::new()) }; + pub static GENERATE_ELECTION_HOOK_CALLED: std::cell::Cell = const { std::cell::Cell::new(0) }; + pub static PROCESS_BLOCK_DATA_HOOK_CALLED: std::cell::Cell = const { std::cell::Cell::new(0) }; + // the actual block data that process_block_data was called with. + pub static PROCESS_BLOCK_DATA_CALLED_WITH: std::cell::RefCell> = const { std::cell::RefCell::new(vec![]) }; + // Flag to pass through block data: + pub static PASS_THROUGH_BLOCK_DATA: std::cell::Cell = const { std::cell::Cell::new(true) }; + pub static PROCESS_BLOCK_DATA_TO_RETURN: std::cell::RefCell> = const { std::cell::RefCell::new(vec![]) }; +} + +pub type ChainBlockNumber = ::ChainBlockNumber; +pub type ValidatorId = u16; + +pub type BlockData = Vec; + +struct MockGenerateElectionHook { + _phantom: core::marker::PhantomData<(ChainBlockNumber, Properties)>, +} + +fn range_n(n: u64) -> RangeOfBlockWitnessRanges { + // TODO: Test with other witness ranges. + RangeOfBlockWitnessRanges::try_new(n, n, 1).unwrap() +} + +pub type Properties = BTreeSet; + +impl BlockElectionPropertiesGenerator + for MockGenerateElectionHook +{ + fn generate_election_properties(_root_to_witness: ChainBlockNumber) -> Properties { + GENERATE_ELECTION_HOOK_CALLED.with(|hook_called| hook_called.set(hook_called.get() + 1)); + // The properties are not important to the logic of the electoral system itself, so we can + // return empty. + BTreeSet::new() + } +} + +struct MockBlockProcessor { + _phantom: core::marker::PhantomData<(ChainBlockNumber, BlockData)>, +} + +impl MockBlockProcessor { + pub fn set_block_data_to_return(block_data: Vec<(ChainBlockNumber, BlockData)>) { + PASS_THROUGH_BLOCK_DATA.with(|pass_through| pass_through.set(false)); + PROCESS_BLOCK_DATA_TO_RETURN + .with(|block_data_to_return| *block_data_to_return.borrow_mut() = block_data); + } +} + +impl ProcessBlockData + for MockBlockProcessor +{ + // We need to do more here, like store some state and push back. + fn process_block_data( + // This isn't so important, in these tests, it's important for the implemenation of the + // hooks. e.g. to determine a safety margin. + _chain_block_number: ChainBlockNumber, + earliest_unprocessed_block: ChainBlockNumber, + block_data: Vec<(ChainBlockNumber, BlockData)>, + ) -> Vec<(ChainBlockNumber, BlockData)> { + PROCESS_BLOCK_DATA_HOOK_CALLED.with(|hook_called| hook_called.set(hook_called.get() + 1)); + + PROCESS_BLOCK_DATA_CALLED_WITH + .with(|old_block_data| *old_block_data.borrow_mut() = block_data.clone()); + + if PASS_THROUGH_BLOCK_DATA.with(|pass_through| pass_through.get()) { + println!("passing through block data"); + block_data + } else { + PROCESS_BLOCK_DATA_TO_RETURN + .with(|block_data_to_return| block_data_to_return.borrow().clone()) + } + + // TODO: Think about if we need this check. It's not currently enforced in the traits, so + // perhaps instead we should handle cases where the hook returns any set of properties. It + // would usually be wrong to do so, but this ES doens't have to break as a result. 
+ // check that all blocks in block_data_to_retun are in block_data to ensure test consistency + // block_data_to_return + // .clone() + // .into_iter() + // .for_each(|(block_number, block_data_return)| { + // if let Some(data) = block_data_return { + // assert!(block_data_vec.contains(&(block_number, data))); + // } else { + // assert!(!block_data_vec.iter().any(|(number, _)| number == &block_number)); + // } + // }); + } +} + +// We need to provide a mock chain here... MockEthereum might be what we're after +type SimpleBlockWitnesser = BlockWitnesser< + MockEthereum, + BlockData, + Properties, + ValidatorId, + MockBlockProcessor, + MockGenerateElectionHook, +>; + +register_checks! { + SimpleBlockWitnesser { + generate_election_properties_called_n_times(_pre, _post, n: u8) { + assert_eq!(GENERATE_ELECTION_HOOK_CALLED.with(|hook_called| hook_called.get()), n, "generate_election_properties should have been called {} times so far!", n); + }, + number_of_open_elections_is(_pre, post, n: ElectionCount) { + assert_eq!(post.unsynchronised_state.elections_open_for.len(), n as usize, "Number of open elections should be {}", n); + }, + process_block_data_called_n_times(_pre, _post, n: u8) { + assert_eq!(PROCESS_BLOCK_DATA_HOOK_CALLED.with(|hook_called| hook_called.get()), n, "process_block_data should have been called {} times so far!", n); + }, + process_block_data_called_last_with(_pre, _post, block_data: Vec<(ChainBlockNumber, BlockData)>) { + assert_eq!(PROCESS_BLOCK_DATA_CALLED_WITH.with(|old_block_data| old_block_data.borrow().clone()), block_data, "process_block_data should have been called with {:?}", block_data); + }, + unprocessed_data_is(_pre, post, data: Vec<(ChainBlockNumber, BlockData)>) { + assert_eq!(post.unsynchronised_state.unprocessed_data, data, "Unprocessed data should be {:?}", data); + }, + } +} + +fn generate_votes( + correct_voters: BTreeSet, + incorrect_voters: BTreeSet, + did_not_vote: BTreeSet, + correct_data: BlockData, +) -> ConsensusVotes { + println!("Generate votes called"); + + let incorrect_data = vec![1u8, 2, 3]; + assert_ne!(incorrect_data, correct_data); + let votes = ConsensusVotes { + votes: correct_voters + .clone() + .into_iter() + .map(|v| ConsensusVote { vote: Some(((), correct_data.clone())), validator_id: v }) + .chain(incorrect_voters.clone().into_iter().map(|v| ConsensusVote { + vote: Some(((), incorrect_data.clone())), + validator_id: v, + })) + .chain( + did_not_vote + .clone() + .into_iter() + .map(|v| ConsensusVote { vote: None, validator_id: v }), + ) + .collect(), + }; + println!("correct voters: {:?}", correct_voters.len()); + println!("incorrect voters: {:?}", incorrect_voters.len()); + println!("did not vote: {:?}", did_not_vote.len()); + votes +} + +// Util to create a successful set of votes, along with the consensus expectation. +fn create_votes_expectation( + consensus: BlockData, +) -> ( + ConsensusVotes, + Option<::Consensus>, +) { + ( + generate_votes( + (0..20).collect(), + Default::default(), + Default::default(), + consensus.clone(), + ), + Some(consensus), + ) +} + +const MAX_CONCURRENT_ELECTIONS: ElectionCount = 5; + +// We start an election for a block and there is nothing there. The base case. 
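+
+// A short usage sketch of the vote helpers above, as the tests below use them: with 20 correct
+// voters, no incorrect voters and no abstentions, consensus should equal the voted data.
+//
+//   let (votes, expected) = create_votes_expectation(vec![4, 5]);
+//   // `votes` holds 20 unanimous votes for vec![4, 5] and `expected` is Some(vec![4, 5]),
+//   // which is what `expect_consensus` / `expect_consensus_multi` assert against.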
+#[test] +fn no_block_data_success() { + const NEXT_BLOCK_RECEIVED: ChainBlockNumber = 1; + TestSetup::::default() + .with_unsynchronised_settings(BlockWitnesserSettings { + max_concurrent_elections: MAX_CONCURRENT_ELECTIONS, + }) + .build() + .test_on_finalize( + &ChainProgress::Continuous(range_n(NEXT_BLOCK_RECEIVED)), + |_| {}, + vec![ + Check::::generate_election_properties_called_n_times(1), + Check::::number_of_open_elections_is(1), + Check::::process_block_data_called_n_times(1), + ], + ) + .expect_consensus( + generate_votes((0..20).collect(), Default::default(), Default::default(), vec![]), + Some(vec![]), + ) + .test_on_finalize( + &ChainProgress::Continuous(range_n(NEXT_BLOCK_RECEIVED)), + |_| {}, + vec![ + // No extra calls + Check::::generate_election_properties_called_n_times(1), + Check::::process_block_data_called_n_times(2), + // We should receive an empty block data, but still get the block number. This is + // necessary so we can track the last chain block we've processed. + Check::::process_block_data_called_last_with(vec![( + NEXT_BLOCK_RECEIVED, + vec![], + )]), + ], + ); +} + +#[test] +fn creates_multiple_elections_below_maximum_when_required() { + const INIT_LAST_BLOCK_RECEIVED: ChainBlockNumber = 0; + const NUMBER_OF_ELECTIONS: ElectionCount = MAX_CONCURRENT_ELECTIONS - 1; + TestSetup::::default() + .with_unsynchronised_settings(BlockWitnesserSettings { + max_concurrent_elections: MAX_CONCURRENT_ELECTIONS, + }) + .build() + .test_on_finalize( + // Process multiple elections, but still less than the maximum concurrent + &ChainProgress::Continuous(range_n( + INIT_LAST_BLOCK_RECEIVED + (NUMBER_OF_ELECTIONS as u64), + )), + |pre_state| { + assert_eq!(pre_state.unsynchronised_state.elections_open_for.len(), 0); + }, + vec![ + Check::::generate_election_properties_called_n_times(4), + Check::::number_of_open_elections_is(NUMBER_OF_ELECTIONS), + ], + ) + .expect_consensus_multi(vec![ + ( + generate_votes((0..20).collect(), Default::default(), Default::default(), vec![]), + Some(vec![]), + ), + ( + generate_votes( + (0..20).collect(), + Default::default(), + Default::default(), + vec![1, 3, 4], + ), + Some(vec![1, 3, 4]), + ), + // no progress on external chain but on finalize called again + ]) + .test_on_finalize( + // same block again + &ChainProgress::None(INIT_LAST_BLOCK_RECEIVED + (NUMBER_OF_ELECTIONS as u64)), + |pre_state| { + assert_eq!( + pre_state.unsynchronised_state.elections_open_for.len(), + NUMBER_OF_ELECTIONS as usize + ); + }, + vec![ + // Still no extra elections created. 
+ Check::::generate_election_properties_called_n_times( + NUMBER_OF_ELECTIONS as u8, + ), + // we should have resolved two elections + Check::::number_of_open_elections_is(2), + ], + ); +} + +#[test] +fn creates_multiple_elections_limited_by_maximum() { + const INIT_LAST_BLOCK_RECEIVED: ChainBlockNumber = 0; + const NUMBER_OF_ELECTIONS_REQUIRED: ElectionCount = MAX_CONCURRENT_ELECTIONS * 2; + let consensus_resolutions: Vec<( + ConsensusVotes, + Option<::Consensus>, + )> = vec![ + create_votes_expectation(vec![]), + create_votes_expectation(vec![1, 3, 4]), + // no progress on external chain but on finalize called again + ]; + let number_of_resolved_elections = consensus_resolutions.len(); + TestSetup::::default() + .with_unsynchronised_settings(BlockWitnesserSettings { + max_concurrent_elections: MAX_CONCURRENT_ELECTIONS, + }) + .build() + .test_on_finalize( + // Process multiple elections, but still less than the maximum concurrent + &ChainProgress::Continuous(range_n( + INIT_LAST_BLOCK_RECEIVED + (NUMBER_OF_ELECTIONS_REQUIRED as u64), + )), + |pre_state| { + assert_eq!(pre_state.unsynchronised_state.elections_open_for.len(), 0); + }, + vec![ + Check::::generate_election_properties_called_n_times( + MAX_CONCURRENT_ELECTIONS as u8, + ), + Check::::number_of_open_elections_is( + MAX_CONCURRENT_ELECTIONS, + ), + ], + ) + // Only resolve two of the elections. The last 3 are unresolved at this point. But + // we now have space to start new elections. + .expect_consensus_multi(consensus_resolutions) + .test_on_finalize( + &ChainProgress::None(INIT_LAST_BLOCK_RECEIVED + (NUMBER_OF_ELECTIONS_REQUIRED as u64)), + |pre_state| { + assert_eq!( + pre_state.unsynchronised_state.elections_open_for.len(), + MAX_CONCURRENT_ELECTIONS as usize + ); + }, + vec![ + Check::::generate_election_properties_called_n_times( + MAX_CONCURRENT_ELECTIONS as u8 + number_of_resolved_elections as u8, + ), + // Two elections were resolved and two new ones were started, so we are back at the maximum. + Check::::number_of_open_elections_is( + MAX_CONCURRENT_ELECTIONS, + ), + ], + ); +} + +#[test] +fn reorg_clears_on_going_elections_and_continues() { + const INIT_LAST_BLOCK_RECEIVED: ChainBlockNumber = 10; + const NEXT_BLOCK_NUMBER: ChainBlockNumber = + INIT_LAST_BLOCK_RECEIVED + MAX_CONCURRENT_ELECTIONS as u64; + const REORG_LENGTH: ChainBlockNumber = 3; + + let all_votes = (INIT_LAST_BLOCK_RECEIVED + 1..=NEXT_BLOCK_NUMBER) + .map(|_| create_votes_expectation(vec![5, 6, 7])) + .collect::>(); + + // We have already emitted an election for `INIT_LAST_BLOCK_RECEIVED` (see TestSetup below), so + // we add 1. 
+ let expected_unprocessed_data = (INIT_LAST_BLOCK_RECEIVED + 1..=NEXT_BLOCK_NUMBER) + .map(|i| (i, vec![5, 6, 7])) + .collect::>(); + + let mut block_after_reorg_block_unprocessed_data = expected_unprocessed_data.clone(); + block_after_reorg_block_unprocessed_data + .push(((NEXT_BLOCK_NUMBER - REORG_LENGTH), vec![5, 6, 77])); + + TestSetup::::default() + .with_unsynchronised_state(BlockWitnesserState { + last_block_election_emitted_for: INIT_LAST_BLOCK_RECEIVED, + ..BlockWitnesserState::default() + }) + .with_unsynchronised_settings(BlockWitnesserSettings { + max_concurrent_elections: MAX_CONCURRENT_ELECTIONS, + }) + .build() + .test_on_finalize( + &ChainProgress::Continuous(range_n(NEXT_BLOCK_NUMBER)), + |_| {}, + vec![ + Check::::generate_election_properties_called_n_times( + MAX_CONCURRENT_ELECTIONS as u8, + ), + Check::::number_of_open_elections_is( + MAX_CONCURRENT_ELECTIONS, + ), + // No reorg, so we try processing any unprocessed state (there would be none at + // this point though, since no elections have resolved). + Check::::process_block_data_called_n_times(1), + ], + ) + .then(|| println!("We're about to come to consensus on some blocks.")) + .expect_consensus_multi(all_votes) + // Process votes as normal, progressing by one block, storing the state + .test_on_finalize( + &ChainProgress::Continuous(range_n(NEXT_BLOCK_NUMBER + 1)), + |_| {}, + vec![ + // We've already processed the other elections, so we only have to create a new + // election for the new block. + Check::::generate_election_properties_called_n_times( + MAX_CONCURRENT_ELECTIONS as u8 + 1, + ), + Check::::number_of_open_elections_is(1), + Check::::process_block_data_called_n_times(2), + Check::::unprocessed_data_is( + expected_unprocessed_data.clone(), + ), + ], + ) + .then(|| println!("We're about to come to consensus on a block that will trigger a reorg.")) + // Reorg occurs + .test_on_finalize( + &ChainProgress::Reorg( + RangeOfBlockWitnessRanges::try_new( + // The range is inclusive, so for a reorg of REORG_LENGTH blocks we subtract + // (REORG_LENGTH - 1) from the end: here a reorg of 3 ending at block 16 spans 14..=16. + (NEXT_BLOCK_NUMBER + 1) - (REORG_LENGTH - 1), + NEXT_BLOCK_NUMBER + 1, + 1, + ) + .unwrap(), + ), + |_| {}, + // We remove the active ones and open new elections for the blocks that we detected a + // reorg for. + vec![ + Check::::generate_election_properties_called_n_times( + // REORG_LENGTH more than the last time we checked. + MAX_CONCURRENT_ELECTIONS as u8 + 1 + REORG_LENGTH as u8, + ), + Check::::number_of_open_elections_is(REORG_LENGTH as u16), + // We call it again: even though there was a reorg, some external state that + // process_block_data uses may have changed, so it may now be able to process some of the + // existing data. + Check::::process_block_data_called_n_times(3), + // We keep the data, since it may need to be used by process_block_data to + // deduplicate actions. We don't want to submit an action twice. 
+ Check::::unprocessed_data_is(expected_unprocessed_data), + ], + ); +} + +// #[test] +// fn partially_processed_block_data_processed_next_on_finalize() { +// let first_block_consensus: BlockData = vec![5, 6, 7]; + +// let first_block_data_after_processing: Vec<_> = +// first_block_consensus.clone().into_iter().take(2).collect(); + +// const INIT_LAST_BLOCK_RECEIVED: ChainBlockNumber = 0; +// TestSetup::::default() +// .with_unsynchronised_state(BlockWitnesserState { +// last_block_received: INIT_LAST_BLOCK_RECEIVED, +// ..BlockWitnesserState::default() +// }) +// .with_unsynchronised_settings(BlockWitnesserSettings { +// max_concurrent_elections: MAX_CONCURRENT_ELECTIONS, +// }) +// .build() +// .test_on_finalize( +// &range_n(INIT_LAST_BLOCK_RECEIVED + 1), +// |_| {}, +// vec![ +// Check::::generate_election_properties_called_n_times(1), +// Check::::number_of_open_elections_is(1), +// Check::::process_block_data_called_n_times(1), +// // We haven't come to consensus on any elections, so there's no unprocessed data. +// Check::::process_block_data_called_last_with(vec![]), +// ], +// ) +// .expect_consensus_multi(vec![create_votes_expectation(first_block_consensus.clone())]) +// .then(|| { +// // We process one of the items, so we return only 2 of 3. +// MockBlockProcessor::set_block_data_to_return(vec![( +// INIT_LAST_BLOCK_RECEIVED + 1, +// first_block_data_after_processing.clone(), +// )]); +// }) +// .test_on_finalize( +// &range_n(INIT_LAST_BLOCK_RECEIVED + 2), +// |_| {}, +// vec![ +// Check::::generate_election_properties_called_n_times(2), +// // One opened, one closed. +// Check::::number_of_open_elections_is(1), +// // We call it again. +// Check::::process_block_data_called_n_times(2), +// // We have the election data for the election we emitted before now. We try to +// // process it. +// Check::::process_block_data_called_last_with(vec![( +// INIT_LAST_BLOCK_RECEIVED + 1, +// first_block_consensus, +// )]), +// ], +// ) +// // No progress on external chain, so state should be the same as above, except that we +// // processed one of the items last time. +// .test_on_finalize( +// &range_n(INIT_LAST_BLOCK_RECEIVED + 2), +// |_| {}, +// vec![ +// Check::::generate_election_properties_called_n_times(2), +// Check::::number_of_open_elections_is(1), +// // We call it again. 
+// Check::::process_block_data_called_n_times(3), +// Check::::process_block_data_called_last_with(vec![( +// INIT_LAST_BLOCK_RECEIVED + 1, +// first_block_data_after_processing, +// )]), +// ], +// ); +// } + +// #[test] +// fn elections_resolved_out_of_order_has_no_impact() { +// const INIT_LAST_BLOCK_RECEIVED: ChainBlockNumber = 0; +// const FIRST_ELECTION_BLOCK_CREATED: ChainBlockNumber = INIT_LAST_BLOCK_RECEIVED + 1; +// const SECOND_ELECTION_BLOCK_CREATED: ChainBlockNumber = FIRST_ELECTION_BLOCK_CREATED + 1; +// const NUMBER_OF_ELECTIONS: ElectionCount = 2; +// TestSetup::::default() +// .with_unsynchronised_state(BlockWitnesserState { +// last_block_received: INIT_LAST_BLOCK_RECEIVED, +// ..BlockWitnesserState::default() +// }) +// .with_unsynchronised_settings(BlockWitnesserSettings { +// max_concurrent_elections: MAX_CONCURRENT_ELECTIONS, +// }) +// .build() +// .test_on_finalize( +// // Process multiple elections, but still elss than the maximum concurrent +// &range_n(INIT_LAST_BLOCK_RECEIVED + 2), +// |pre_state| { +// assert_eq!(pre_state.unsynchronised_state.open_elections, 0); +// }, +// vec![ +// Check::::generate_election_properties_called_n_times( +// NUMBER_OF_ELECTIONS as u8, +// ), +// Check::::number_of_open_elections_is(NUMBER_OF_ELECTIONS), +// ], +// ) +// .expect_consensus_multi(vec![ +// ( +// // no consensus +// generate_votes((0..20).collect(), (0..20).collect(), Default::default(), vec![]), +// None, +// ), +// ( +// // consensus +// generate_votes( +// (0..40).collect(), +// Default::default(), +// Default::default(), +// vec![1, 3, 4], +// ), +// Some(vec![1, 3, 4]), +// ), +// ]) +// // no progress on external chain but on finalize called again +// // TODO: Check the new elections have kicked off correct +// .test_on_finalize( +// &range_n(INIT_LAST_BLOCK_RECEIVED + (NUMBER_OF_ELECTIONS as u64) + 1), +// |pre_state| { +// assert_eq!(pre_state.unsynchronised_state.open_elections, NUMBER_OF_ELECTIONS); +// }, +// vec![ +// // one extra election created +// Check::::generate_election_properties_called_n_times( +// (NUMBER_OF_ELECTIONS + 1) as u8, +// ), +// // we should have resolved one election, and started one election +// Check::::number_of_open_elections_is(2), +// Check::::unprocessed_data_is(vec![( +// SECOND_ELECTION_BLOCK_CREATED, +// vec![1, 3, 4], +// )]), +// ], +// ) +// // gain consensus on the first emitted election now +// .expect_consensus_multi(vec![( +// generate_votes( +// (0..40).collect(), +// Default::default(), +// Default::default(), +// vec![9, 1, 2], +// ), +// Some(vec![9, 1, 2]), +// )]) +// .test_on_finalize( +// &range_n(INIT_LAST_BLOCK_RECEIVED + (NUMBER_OF_ELECTIONS as u64) + 2), +// |pre_state| { +// assert_eq!( +// pre_state.unsynchronised_state.open_elections, 2, +// "number of open elections should be 2" +// ); +// }, +// vec![ +// // one extra election created +// Check::::generate_election_properties_called_n_times( +// (NUMBER_OF_ELECTIONS + 2) as u8, +// ), +// // we should have resolved one elections, and started one election +// Check::::number_of_open_elections_is(2), +// // Now the first election we emitted is resolved, and its block data should be +// // stored, and we should still have the second election block data. 
+// Check::::unprocessed_data_is(vec![ +// (SECOND_ELECTION_BLOCK_CREATED, vec![1, 3, 4]), +// (FIRST_ELECTION_BLOCK_CREATED, vec![9, 1, 2]), +// ]), +// ], +// ) +// // Gain consensus on the final elections +// .expect_consensus_multi(vec![ +// ( +// generate_votes( +// (0..40).collect(), +// Default::default(), +// Default::default(), +// vec![81, 1, 93], +// ), +// Some(vec![81, 1, 93]), +// ), +// ( +// generate_votes( +// (0..40).collect(), +// Default::default(), +// Default::default(), +// vec![69, 69, 69], +// ), +// Some(vec![69, 69, 69]), +// ), +// ]) +// // external chain doesn't move forward +// .test_on_finalize( +// &range_n(INIT_LAST_BLOCK_RECEIVED + (NUMBER_OF_ELECTIONS as u64) + 2), +// |pre_state| { +// assert_eq!( +// pre_state.unsynchronised_state.open_elections, 2, +// "number of open elections should be 2" +// ); +// }, +// vec![ +// // one extra election created +// Check::::generate_election_properties_called_n_times( +// (NUMBER_OF_ELECTIONS + 2) as u8, +// ), +// // all elections have resolved now +// Check::::number_of_open_elections_is(0), +// // Now the last two elections are resolved in order +// Check::::unprocessed_data_is(vec![ +// (SECOND_ELECTION_BLOCK_CREATED, vec![1, 3, 4]), +// (FIRST_ELECTION_BLOCK_CREATED, vec![9, 1, 2]), +// (SECOND_ELECTION_BLOCK_CREATED + 1, vec![81, 1, 93]), +// (SECOND_ELECTION_BLOCK_CREATED + 2, vec![69, 69, 69]), +// ]), +// ], +// ); +// } diff --git a/state-chain/pallets/cf-elections/src/lib.rs b/state-chain/pallets/cf-elections/src/lib.rs index d8f8f9864a..aafb313237 100644 --- a/state-chain/pallets/cf-elections/src/lib.rs +++ b/state-chain/pallets/cf-elections/src/lib.rs @@ -102,6 +102,8 @@ //! or reconstructed from the others is also configured via that trait. #![feature(try_find)] +#![feature(step_trait)] +#![feature(trait_alias)] #![cfg_attr(test, feature(closure_track_caller))] #![cfg_attr(not(feature = "std"), no_std)] #![doc = include_str!("../README.md")] @@ -270,7 +272,10 @@ pub mod pallet { /// specifically in Solana's chain/fee tracking trait impls as those traits do not allow /// errors to be returned, this is ok, but should be avoided in future. #[derive(Debug, PartialEq, Eq)] - pub struct CorruptStorageError {} + pub struct CorruptStorageError { + // force usage of new() to log + _no_construct: (), + } impl CorruptStorageError { /// We use this function to create this error type (and make the struct impossible to /// create without it) so it is easier to find all locations we create the error, and so @@ -282,7 +287,7 @@ pub mod pallet { "Election pallet CorruptStorageError at '{}'.", core::panic::Location::caller() ); - Self {} + Self { _no_construct: () } } } } @@ -1613,6 +1618,7 @@ pub mod pallet { } } + log::info!("calling on_finalize for ElectionSystemRunner"); T::ElectoralSystemRunner::on_finalize(election_identifiers)?; Ok(()) diff --git a/state-chain/pallets/cf-elections/src/vote_storage/composite.rs b/state-chain/pallets/cf-elections/src/vote_storage/composite.rs index 3de8dc9e97..3613ea43c2 100644 --- a/state-chain/pallets/cf-elections/src/vote_storage/composite.rs +++ b/state-chain/pallets/cf-elections/src/vote_storage/composite.rs @@ -86,6 +86,8 @@ macro_rules! generate_vote_storage_tuple_impls { }) }, )* + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()), } } @@ -109,6 +111,8 @@ macro_rules! generate_vote_storage_tuple_impls { match get_shared_data(shared_data_hash)? 
{ Some(CompositeSharedData::$t(shared_data)) => Ok(Some(shared_data)), None => Ok(None), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()) } }, @@ -135,6 +139,8 @@ macro_rules! generate_vote_storage_tuple_impls { match get_shared_data(shared_data_hash)? { Some(CompositeSharedData::$t(shared_data)) => Ok(Some(shared_data)), None => Ok(None), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()), } }, @@ -161,6 +167,8 @@ macro_rules! generate_vote_storage_tuple_impls { match get_shared_data(shared_data_hash)? { Some(CompositeSharedData::$t(shared_data)) => Ok(Some(shared_data)), None => Ok(None), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()), } }, @@ -179,6 +187,8 @@ macro_rules! generate_vote_storage_tuple_impls { individual_component: None, bitmap_component: None, } => Ok(None), + // For when we have a composite of 1 + #[allow(unreachable_patterns)] _ => Err(CorruptStorageError::new()), } } @@ -274,4 +284,6 @@ macro_rules! generate_vote_storage_tuple_impls { } } +generate_vote_storage_tuple_impls!(tuple_1_impls: (A)); +generate_vote_storage_tuple_impls!(tuple_2_impls: (A, B)); generate_vote_storage_tuple_impls!(tuple_7_impls: (A, B, C, D, EE, FF, GG)); diff --git a/state-chain/pallets/cf-ingress-egress/src/lib.rs b/state-chain/pallets/cf-ingress-egress/src/lib.rs index 5bbc4ce2c8..ac320b97de 100644 --- a/state-chain/pallets/cf-ingress-egress/src/lib.rs +++ b/state-chain/pallets/cf-ingress-egress/src/lib.rs @@ -59,6 +59,7 @@ use scale_info::{ build::{Fields, Variants}, Path, Type, }; +use serde::{Deserialize, Serialize}; use sp_runtime::{traits::UniqueSaturatedInto, Percent}; use sp_std::{ boxed::Box, @@ -357,7 +358,20 @@ pub mod pallet { pub type TransactionInIdFor = <<>::TargetChain as Chain>::ChainCrypto as ChainCrypto>::TransactionInId; - #[derive(Clone, RuntimeDebug, PartialEq, Eq, Encode, Decode, TypeInfo, MaxEncodedLen)] + #[derive( + Clone, + RuntimeDebug, + PartialEq, + Eq, + Encode, + Decode, + TypeInfo, + MaxEncodedLen, + Ord, + PartialOrd, + Serialize, + Deserialize, + )] pub struct DepositWitness { pub deposit_address: C::ChainAccount, pub asset: C::ChainAsset, @@ -729,6 +743,12 @@ pub mod pallet { pub type NetworkFeeDeductionFromBoostPercent, I: 'static = ()> = StorageValue<_, Percent, ValueQuery>; + /// What the witnessing says we've processed up to. This allows us to expire channels safely. If + /// the witnessing has processed up to a block, then we can safely recycle the channels. 
+ #[pallet::storage] + pub type ProcessedUpTo, I: 'static = ()> = + StorageValue<_, TargetChainBlockNumber, ValueQuery>; + #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event, I: 'static = ()> { @@ -972,7 +992,11 @@ pub mod pallet { Self::take_recyclable_addresses( recycle_queue, maximum_addresses_to_recycle, - T::ChainTracking::get_block_height(), + if T::TargetChain::NAME == "Bitcoin" { + ProcessedUpTo::::get() + } else { + T::ChainTracking::get_block_height() + }, ) } }); @@ -1824,7 +1848,7 @@ impl, I: 'static> Pallet { Err("Insufficient boost funds".into()) } - fn process_channel_deposit_prewitness( + pub fn process_channel_deposit_prewitness( DepositWitness { deposit_address, asset, amount, deposit_details }: DepositWitness< T::TargetChain, >, @@ -1948,7 +1972,7 @@ impl, I: 'static> Pallet { // A wrapper around `process_channel_deposit_full_witness_inner` that catches any // error and emits a rejection event - fn process_channel_deposit_full_witness( + pub fn process_channel_deposit_full_witness( deposit_witness: DepositWitness, block_height: TargetChainBlockNumber, ) { @@ -2797,6 +2821,48 @@ impl, I: 'static> Pallet { Ok(*id) }) } + + // TODO: Write test + + // This should only be used if we're using ProcessedUpTo to track the block height. + pub fn active_deposit_channels_at( + block_height: TargetChainBlockNumber, + ) -> Vec> { + debug_assert!(::is_block_witness_root(block_height)); + + // Opened at: 1, Expires at: 122, Processed up to: 0, not active at 163 + DepositChannelLookup::::iter_values() + .filter_map(|details| { + // TODO: Subtract safety from opened_at + if details.opened_at <= block_height && + (block_height <= details.expires_at && + // If we have not yet processed the expires_at block, then we shouldn't expire it yet. i.e. we should include it as an active channel. 
+ ProcessedUpTo::::get() < details.expires_at) + { + // TODO: Filter not filter_map + log::info!( + "Include channel: {:?} for height: {}", + details.deposit_channel, + block_height + ); + Some(details) + } else { + log::info!( + "Don't include channel {:?} as it's not active at block height {:?}", + details.deposit_channel, + block_height + ); + log::info!( + "Opened at: {:?}, Expires at: {:?}, Processed up to: {:?}", + details.opened_at, + details.expires_at, + ProcessedUpTo::::get() + ); + None + } + }) + .collect() + } } impl, I: 'static> EgressApi for Pallet { diff --git a/state-chain/pallets/cf-ingress-egress/src/tests.rs b/state-chain/pallets/cf-ingress-egress/src/tests.rs index 1ca0e4dc3a..bec1978e1c 100644 --- a/state-chain/pallets/cf-ingress-egress/src/tests.rs +++ b/state-chain/pallets/cf-ingress-egress/src/tests.rs @@ -2207,8 +2207,8 @@ fn assembling_broker_fees() { new_test_ext().execute_with(|| { let broker_fee = Beneficiary { account: BROKER, bps: 0 }; - const AFFILIATE_IDS: [u64; 5] = [10, 20, 30, 40, 50]; - const AFFILIATE_SHORT_IDS: [u8; 5] = [1, 2, 3, 4, 5]; + const AFFILIATE_IDS: [u64; MAX_AFFILIATES as usize] = [10, 20, 30, 40, 50]; + const AFFILIATE_SHORT_IDS: [u8; MAX_AFFILIATES as usize] = [1, 2, 3, 4, 5]; assert_eq!(AFFILIATE_IDS.len(), MAX_AFFILIATES as usize); diff --git a/state-chain/runtime/src/chainflip.rs b/state-chain/runtime/src/chainflip.rs index 05d998309a..ffa6b2c104 100644 --- a/state-chain/runtime/src/chainflip.rs +++ b/state-chain/runtime/src/chainflip.rs @@ -10,6 +10,9 @@ mod missed_authorship_slots; mod offences; pub mod pending_rotation_broadcasts; mod signer_nomination; + +// Election pallet implementations +pub mod bitcoin_elections; pub mod solana_elections; use crate::{ diff --git a/state-chain/runtime/src/chainflip/bitcoin_elections.rs b/state-chain/runtime/src/chainflip/bitcoin_elections.rs new file mode 100644 index 0000000000..d89465cc8b --- /dev/null +++ b/state-chain/runtime/src/chainflip/bitcoin_elections.rs @@ -0,0 +1,209 @@ +use crate::{BitcoinIngressEgress, Runtime}; +use cf_chains::{btc, Bitcoin}; +use cf_traits::Chainflip; +use log::info; + +use cf_chains::instances::BitcoinInstance; + +use codec::{Decode, Encode, MaxEncodedLen}; +use pallet_cf_elections::{ + electoral_system::ElectoralSystem, + electoral_systems::{ + block_height_tracking::{ + self, state_machine_es::DsmElectoralSystem, BlockHeightTrackingConsensus, + BlockHeightTrackingDSM, ChainProgress, OldChainProgress, RangeOfBlockWitnessRanges, + }, + block_witnesser::{ + BlockElectionPropertiesGenerator, BlockWitnesser, BlockWitnesserSettings, + ProcessBlockData, + }, + composite::{ + tuple_2_impls::{DerivedElectoralAccess, Hooks}, + CompositeRunner, + }, + }, + CorruptStorageError, ElectionIdentifier, InitialState, InitialStateOf, RunnerStorageAccess, +}; + +use pallet_cf_ingress_egress::{ + DepositChannelDetails, DepositWitness, ProcessedUpTo, WitnessSafetyMargin, +}; +use scale_info::TypeInfo; + +use sp_runtime::Either; +use sp_std::vec::Vec; + +pub type BitcoinElectoralSystemRunner = CompositeRunner< + (BitcoinBlockHeightTracking, BitcoinDepositChannelWitnessing), + ::ValidatorId, + RunnerStorageAccess, + BitcoinElectionHooks, +>; + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Encode, Decode, TypeInfo, MaxEncodedLen)] +pub struct OpenChannelDetails { + pub open_block: ChainBlockNumber, + pub close_block: ChainBlockNumber, +} + +pub type BitcoinDepositChannelWitnessing = BlockWitnesser< + Bitcoin, + Vec>, + Vec>, + ::ValidatorId, + BitcoinDepositChannelWitessingProcessor, 
+ BitcoinDepositChannelWitnessingGenerator, +>; + +pub type BitcoinBlockHeightTracking = DsmElectoralSystem< + BlockHeightTrackingDSM<6, btc::BlockNumber, btc::Hash>, + ::ValidatorId, + (), + BlockHeightTrackingConsensus, +>; + +pub struct BitcoinDepositChannelWitnessingGenerator; + +impl + BlockElectionPropertiesGenerator< + btc::BlockNumber, + Vec>, + > for BitcoinDepositChannelWitnessingGenerator +{ + fn generate_election_properties( + block_witness_root: btc::BlockNumber, + ) -> Vec> { + // TODO: Channel expiry + BitcoinIngressEgress::active_deposit_channels_at(block_witness_root) + } +} + +pub struct BitcoinDepositChannelWitessingProcessor; + +impl ProcessBlockData>> + for BitcoinDepositChannelWitessingProcessor +{ + fn process_block_data( + current_block: btc::BlockNumber, + earliest_unprocessed_block: btc::BlockNumber, + witnesses: Vec<(btc::BlockNumber, Vec>)>, + ) -> Vec<(btc::BlockNumber, Vec>)> { + ProcessedUpTo::::put( + earliest_unprocessed_block.saturating_sub(1), + ); + + // TODO: Handle reorgs, in particular when data is already processed. + // We need to ensure that we don't process the same data twice. We could use a wrapper for + // the BlockData type here that can include some extra status data in it. + + if witnesses.is_empty() { + log::info!("No witnesses to process for block: {:?}", current_block); + } else { + log::info!("Processing witnesses: {:?} for block {:?}", witnesses, current_block); + } + for (deposit_block_number, deposits) in witnesses.clone() { + for deposit in deposits { + if deposit_block_number == current_block { + log::info!("Prewitness deposit submitted by election: {:?}", deposit); + let _ = BitcoinIngressEgress::process_channel_deposit_prewitness( + deposit, + deposit_block_number, + ); + } else if let Some(safety_margin) = + WitnessSafetyMargin::::get() + { + if deposit_block_number <= (current_block - safety_margin) { + log::info!("deposit election submitted by election: {:?}", deposit); + BitcoinIngressEgress::process_channel_deposit_full_witness( + deposit, + deposit_block_number, + ); + } + } + } + } + + // Do we need to return anything here? + witnesses + } +} + +pub struct BitcoinElectionHooks; + +impl Hooks for BitcoinElectionHooks { + fn on_finalize( + (block_height_tracking_identifiers, deposit_channel_witnessing_identifiers): ( + Vec< + ElectionIdentifier< + ::ElectionIdentifierExtra, + >, + >, + Vec< + ElectionIdentifier< + ::ElectionIdentifierExtra, + >, + >, + ), + ) -> Result<(), CorruptStorageError> { + log::info!("BitcoinElectionHooks::called"); + let chain_progress = BitcoinBlockHeightTracking::on_finalize::< + DerivedElectoralAccess< + _, + BitcoinBlockHeightTracking, + RunnerStorageAccess, + >, + >(block_height_tracking_identifiers, &())?; + + let chain_progress = match chain_progress { + Either::Left(x) => x, + Either::Right(x) => x, + }; + + // This code is going to be removed. 
+ // convert the new chain progress to the old version + let chain_progress = match chain_progress { + ChainProgress::Reorg(added) => OldChainProgress::Reorg(RangeOfBlockWitnessRanges { + witness_from_root: added.start().clone(), + witness_to_root: added.end().clone(), + witness_period: 1, // horrible + }), + ChainProgress::Continuous(added) => + OldChainProgress::Continuous(RangeOfBlockWitnessRanges { + witness_from_root: added.start().clone(), + witness_to_root: added.end().clone(), + witness_period: 1, // horrible + }), + ChainProgress::None(block) => OldChainProgress::None(block), + ChainProgress::WaitingForFirstConsensus => OldChainProgress::WaitingForFirstConsensus, + }; + + log::info!("BitcoinElectionHooks::on_finalize: {:?}", chain_progress); + BitcoinDepositChannelWitnessing::on_finalize::< + DerivedElectoralAccess< + _, + BitcoinDepositChannelWitnessing, + RunnerStorageAccess, + >, + >(deposit_channel_witnessing_identifiers, &chain_progress)?; + + Ok(()) + } +} + +// Channel expiry: +// We need to process elections in order, even after a safe mode pause. This is to ensure channel +// expiry is done correctly. During safe mode pause, we could get into a situation where the current +// state suggests that a channel is expired, but at the time of a previous block which we have not +// yet processed, the channel was not expired. + +pub fn initial_state() -> InitialStateOf { + InitialState { + unsynchronised_state: (Default::default(), Default::default()), + unsynchronised_settings: ( + Default::default(), + // TODO: Write a migration to set this too. + BlockWitnesserSettings { max_concurrent_elections: 5 }, + ), + settings: (Default::default(), Default::default()), + } +} diff --git a/state-chain/runtime/src/lib.rs b/state-chain/runtime/src/lib.rs index c965737018..79148f29df 100644 --- a/state-chain/runtime/src/lib.rs +++ b/state-chain/runtime/src/lib.rs @@ -1025,6 +1025,12 @@ impl pallet_cf_elections::Config for Runtime { type WeightInfo = pallet_cf_elections::weights::PalletWeight; } +impl pallet_cf_elections::Config for Runtime { + type RuntimeEvent = RuntimeEvent; + type ElectoralSystemRunner = chainflip::bitcoin_elections::BitcoinElectoralSystemRunner; + type WeightInfo = pallet_cf_elections::weights::PalletWeight; +} + #[frame_support::runtime] mod runtime { #[runtime::runtime] @@ -1140,6 +1146,9 @@ mod runtime { #[runtime::pallet_index(47)] pub type AssetBalances = pallet_cf_asset_balances; + + #[runtime::pallet_index(48)] + pub type BitcoinElections = pallet_cf_elections; } /// The address format for describing accounts. @@ -1208,6 +1217,7 @@ pub type PalletExecutionOrder = ( SolanaChainTracking, // Elections SolanaElections, + BitcoinElections, // Vaults EthereumVault, PolkadotVault, @@ -1385,14 +1395,22 @@ mod benches { } impl_runtime_apis! 
{ - impl runtime_apis::ElectoralRuntimeApi for Runtime { - fn cf_electoral_data(account_id: AccountId) -> Vec { + impl runtime_apis::ElectoralRuntimeApi for Runtime { + fn cf_solana_electoral_data(account_id: AccountId) -> Vec { SolanaElections::electoral_data(&account_id).encode() } - fn cf_filter_votes(account_id: AccountId, proposed_votes: Vec) -> Vec { + fn cf_solana_filter_votes(account_id: AccountId, proposed_votes: Vec) -> Vec { SolanaElections::filter_votes(&account_id, Decode::decode(&mut &proposed_votes[..]).unwrap_or_default()).encode() } + + fn cf_bitcoin_electoral_data(account_id: AccountId) -> Vec { + BitcoinElections::electoral_data(&account_id).encode() + } + + fn cf_bitcoin_filter_votes(account_id: AccountId, proposed_votes: Vec) -> Vec { + BitcoinElections::filter_votes(&account_id, Decode::decode(&mut &proposed_votes[..]).unwrap_or_default()).encode() + } } // START custom runtime APIs diff --git a/state-chain/runtime/src/runtime_apis.rs b/state-chain/runtime/src/runtime_apis.rs index 2b4a4eab9c..307c4e60fa 100644 --- a/state-chain/runtime/src/runtime_apis.rs +++ b/state-chain/runtime/src/runtime_apis.rs @@ -412,13 +412,17 @@ decl_runtime_apis!( ); decl_runtime_apis!( - pub trait ElectoralRuntimeApi { + pub trait ElectoralRuntimeApi { /// Returns SCALE encoded `Option>` - fn cf_electoral_data(account_id: AccountId32) -> Vec; + fn cf_solana_electoral_data(account_id: AccountId32) -> Vec; /// Returns SCALE encoded `BTreeSet>::ElectoralSystem>>` - fn cf_filter_votes(account_id: AccountId32, proposed_votes: Vec) -> Vec; + fn cf_solana_filter_votes(account_id: AccountId32, proposed_votes: Vec) -> Vec; + + fn cf_bitcoin_electoral_data(account_id: AccountId32) -> Vec; + + fn cf_bitcoin_filter_votes(account_id: AccountId32, proposed_votes: Vec) -> Vec; } );
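A note on the renamed runtime APIs above: existing callers of `cf_electoral_data` and `cf_filter_votes` (for example the engine side that queries electoral data over the runtime API) would need to move to the chain-qualified names. A rough caller-side sketch, assuming the usual `sp_api` convention of passing the block hash as the first argument; `client`, `hash` and `account_id` are illustrative placeholders, not names introduced by this change:

    // Hypothetical caller-side migration sketch.
    let api = client.runtime_api();
    // Previously: api.cf_electoral_data(hash, account_id)?
    let solana_data = api.cf_solana_electoral_data(hash, account_id)?;
    let bitcoin_data = api.cf_bitcoin_electoral_data(hash, account_id)?;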