Commit

Merge branch 'main' into idan/main/add-sequencer-deployment-tools
idan-starkware authored Nov 6, 2024
2 parents 5146f1c + a9f3df7 commit 94b9e1e
Showing 108 changed files with 2,026 additions and 617 deletions.
1 change: 1 addition & 0 deletions .cargo/config.toml
@@ -1,4 +1,5 @@
[env]
CAIRO_NATIVE_RUNTIME_LIBRARY = "./libcairo_native_runtime.a"
LLVM_SYS_191_PREFIX = "/usr/lib/llvm-19/"
MLIR_SYS_190_PREFIX = "/usr/lib/llvm-19/"
TABLEGEN_190_PREFIX = "/usr/lib/llvm-19/"
6 changes: 5 additions & 1 deletion .github/workflows/blockifier_ci.yml
@@ -44,13 +44,17 @@ jobs:
runs-on: starkware-ubuntu-20-04-medium
steps:
- uses: actions/checkout@v4
with:
# required to clone native as a git submodule
submodules: recursive
fetch-depth: 0
- uses: ./.github/actions/bootstrap
# No features - build blockifier without features activated by dependencies in the workspace.
- run: cargo build -p blockifier
- run: cargo test -p blockifier
# transaction_serde is not activated by any workspace crate; test the build.
- run: cargo build -p blockifier --features transaction_serde
- run: cargo test -p blockifier --features transaction_serde
# cairo_native is not activated by any workspace crate; test the build.
- run: cargo build -p blockifier --features cairo_native
- run: cargo test -p blockifier --features cairo_native
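The two feature-specific build-and-test pairs exist because neither transaction_serde nor cairo_native is enabled by any other crate in the workspace, so a plain workspace build would never compile that code. As a minimal sketch of the feature-gating pattern these steps exercise (the module and function names below are hypothetical, not taken from the blockifier crate):

#[cfg(feature = "cairo_native")]
mod native_execution {
    // This module only compiles when cargo is invoked with `--features cairo_native`.
    pub fn uses_native_runtime() -> bool {
        true
    }
}

#[cfg(all(test, feature = "cairo_native"))]
mod native_tests {
    #[test]
    fn native_feature_is_exercised() {
        assert!(super::native_execution::uses_native_runtime());
    }
}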
1 change: 1 addition & 0 deletions .github/workflows/blockifier_compiled_cairo.yml
@@ -7,6 +7,7 @@ on:
- reopened
- synchronize
paths:
- 'Cargo.toml'
- '.github/workflows/blockifier_compiled_cairo.yml'
- 'crates/blockifier/feature_contracts/**'
- 'crates/blockifier/src/test_utils/cairo_compile.rs'
2 changes: 2 additions & 0 deletions .github/workflows/main.yml
@@ -31,6 +31,8 @@ jobs:
# Environment setup.
- uses: actions/checkout@v4
with:
# required to clone native as a git submodule
submodules: recursive
# Fetch the entire history. Required to checkout the merge target commit, so the diff can
# be computed.
fetch-depth: 0
3 changes: 3 additions & 0 deletions .gitignore
@@ -37,3 +37,6 @@ scripts/papyrus/p2p_sync_e2e_test/data_server/
# Papyrus helm chart
deployments/papyrus/helm/config/*
!deployments/papyrus/helm/config/example.json

# Generated file used for running contracts compiled with Cairo Native
crates/blockifier/libcairo_native_runtime.a
3 changes: 3 additions & 0 deletions .gitmodules
@@ -0,0 +1,3 @@
[submodule "crates/blockifier/cairo_native"]
path = crates/blockifier/cairo_native
url = https://github.com/lambdaclass/cairo_native
10 changes: 10 additions & 0 deletions Cargo.lock

(Diff not rendered for this generated file.)

2 changes: 2 additions & 0 deletions Cargo.toml
@@ -96,6 +96,7 @@ cairo-lang-sierra = "=2.8.4"
cairo-lang-sierra-to-casm = "2.8.4"
cairo-lang-starknet-classes = "2.8.4"
cairo-lang-utils = "2.8.4"
# Important: when updated, make sure to update the cairo-native submodule as well.
cairo-native = "0.2.0-alpha.4"
cairo-vm = "=1.0.1"
camelpaste = "0.1.0"
@@ -188,6 +189,7 @@ rand_distr = "0.4.3"
regex = "1.10.4"
replace_with = "0.1.7"
reqwest = "0.11"
retry = "2.0.0"
rstest = "0.17.0"
rustc-hex = "2.1.0"
schemars = "0.8.12"
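The newly added retry workspace dependency is not shown in use in the rendered hunks; as a rough illustration only, typical usage of the retry crate looks like the following, assuming its standard retry helper and fixed-delay iterator:

// Hypothetical usage sketch of the `retry` crate; not code from this PR.
use retry::{delay::Fixed, retry};

fn flaky_operation(attempts: &mut u32) -> Result<u32, &'static str> {
    *attempts += 1;
    if *attempts < 3 { Err("not ready yet") } else { Ok(*attempts) }
}

fn main() {
    let mut attempts = 0;
    // Retry up to 5 times, waiting 100 ms between attempts.
    let result = retry(Fixed::from_millis(100).take(5), || flaky_operation(&mut attempts));
    println!("result after {attempts} attempts: {result:?}");
}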
12 changes: 6 additions & 6 deletions config/mempool/default_config.json
@@ -734,7 +734,7 @@
"privacy": "Public",
"value": 5
},
"mempool_p2p_config.network_config.discovery_config.bootstrap_dial_retry_config.max_delay": {
"mempool_p2p_config.network_config.discovery_config.bootstrap_dial_retry_config.max_delay_seconds": {
"description": "The maximum delay in seconds for the exponential backoff strategy.",
"privacy": "Public",
"value": 5
@@ -749,15 +749,15 @@
"privacy": "Public",
"value": 120
},
"mempool_p2p_config.network_config.peer_manager_config.malicious_timeout": {
"description": "The duration a peer is blacklisted after being marked as malicious.",
"mempool_p2p_config.network_config.peer_manager_config.malicious_timeout_seconds": {
"description": "The duration in seconds a peer is blacklisted after being marked as malicious.",
"privacy": "Public",
"value": 31536000
},
"mempool_p2p_config.network_config.peer_manager_config.unstable_timeout": {
"description": "The duration a peer blacklisted after being reported as unstable.",
"mempool_p2p_config.network_config.peer_manager_config.unstable_timeout_millis": {
"description": "The duration in milliseconds a peer blacklisted after being reported as unstable.",
"privacy": "Public",
"value": 1
"value": 1000
},
"mempool_p2p_config.network_config.quic_port": {
"description": "The port that the node listens on for incoming quic connections.",
12 changes: 6 additions & 6 deletions config/papyrus/default_config.json
@@ -184,7 +184,7 @@
"privacy": "Public",
"value": 5
},
"network.discovery_config.bootstrap_dial_retry_config.max_delay": {
"network.discovery_config.bootstrap_dial_retry_config.max_delay_seconds": {
"description": "The maximum delay in seconds for the exponential backoff strategy.",
"privacy": "Public",
"value": 5
@@ -199,15 +199,15 @@
"privacy": "Public",
"value": 120
},
"network.peer_manager_config.malicious_timeout": {
"description": "The duration a peer is blacklisted after being marked as malicious.",
"network.peer_manager_config.malicious_timeout_seconds": {
"description": "The duration in seconds a peer is blacklisted after being marked as malicious.",
"privacy": "Public",
"value": 31536000
},
"network.peer_manager_config.unstable_timeout": {
"description": "The duration a peer blacklisted after being reported as unstable.",
"network.peer_manager_config.unstable_timeout_millis": {
"description": "The duration in milliseconds a peer blacklisted after being reported as unstable.",
"privacy": "Public",
"value": 1
"value": 1000
},
"network.quic_port": {
"description": "The port that the node listens on for incoming quic connections.",
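In both config files the renamed keys now carry their unit in the name (seconds or milliseconds), and the unstable_timeout default was rescaled from 1 to 1000 when it became a milliseconds value. A hypothetical sketch, not the node's actual config structs, of how such unit-suffixed fields commonly map to Duration on the Rust side:

use std::time::Duration;

// Field names mirror the JSON keys above; the struct itself is illustrative only.
struct PeerManagerConfig {
    malicious_timeout_seconds: u64,
    unstable_timeout_millis: u64,
}

impl PeerManagerConfig {
    fn malicious_timeout(&self) -> Duration {
        Duration::from_secs(self.malicious_timeout_seconds)
    }

    fn unstable_timeout(&self) -> Duration {
        Duration::from_millis(self.unstable_timeout_millis)
    }
}

fn main() {
    // Defaults taken from the diff above: one year and one second.
    let config =
        PeerManagerConfig { malicious_timeout_seconds: 31_536_000, unstable_timeout_millis: 1000 };
    assert_eq!(config.unstable_timeout(), Duration::from_secs(1));
    println!("blacklist for {:?} after malicious behavior", config.malicious_timeout());
}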
5 changes: 3 additions & 2 deletions crates/batcher/src/batcher.rs
@@ -156,7 +156,8 @@ impl Batcher {
pub async fn decision_reached(&mut self, input: DecisionReachedInput) -> BatcherResult<()> {
let proposal_id = input.proposal_id;
let proposal_output = self.proposal_manager.take_proposal_result(proposal_id).await?;
let ProposalOutput { state_diff, nonces, tx_hashes, .. } = proposal_output;
let ProposalOutput { state_diff, nonces: address_to_nonce, tx_hashes, .. } =
proposal_output;
// TODO: Keep the height from start_height or get it from the input.
let height = self.storage_reader.height().map_err(|err| {
error!("Failed to get height from storage: {}", err);
@@ -172,7 +173,7 @@
BatcherError::InternalError
})?;
if let Err(mempool_err) =
self.mempool_client.commit_block(CommitBlockArgs { nonces, tx_hashes }).await
self.mempool_client.commit_block(CommitBlockArgs { address_to_nonce, tx_hashes }).await
{
error!("Failed to commit block to mempool: {}", mempool_err);
// TODO: Should we rollback the state diff and return an error?
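The hunk above renames the nonces field of CommitBlockArgs to address_to_nonce, and decision_reached destructures the proposal output with a matching rename so struct-init shorthand still applies. A rough sketch of that shape with placeholder types (only the field names are confirmed by the diff; the real types live in the mempool and Starknet API crates):

use std::collections::HashMap;

// Placeholder aliases for illustration; not the repository's real types.
type ContractAddress = u64;
type Nonce = u64;
type TransactionHash = [u8; 32];

struct ProposalOutput {
    nonces: HashMap<ContractAddress, Nonce>,
    tx_hashes: Vec<TransactionHash>,
}

struct CommitBlockArgs {
    address_to_nonce: HashMap<ContractAddress, Nonce>,
    tx_hashes: Vec<TransactionHash>,
}

fn commit_args_from(proposal_output: ProposalOutput) -> CommitBlockArgs {
    // Destructure with a rename, as in `decision_reached`, so the shorthand below
    // matches the new field name.
    let ProposalOutput { nonces: address_to_nonce, tx_hashes } = proposal_output;
    CommitBlockArgs { address_to_nonce, tx_hashes }
}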
6 changes: 3 additions & 3 deletions crates/batcher/src/batcher_test.rs
@@ -155,8 +155,8 @@ async fn decision_reached(
let expected_proposal_commitment = ProposalCommitment::default();
let tx_hashes = test_tx_hashes(0..5);
let tx_hashes_clone = tx_hashes.clone();
let nonces = test_contract_nonces(0..3);
let nonces_clone = nonces.clone();
let address_to_nonce = test_contract_nonces(0..3);
let nonces_clone = address_to_nonce.clone();

let mut proposal_manager = MockProposalManagerTraitWrapper::new();
proposal_manager.expect_wrap_take_proposal_result().with(eq(PROPOSAL_ID)).return_once(
@@ -174,7 +174,7 @@
);
mempool_client
.expect_commit_block()
.with(eq(CommitBlockArgs { nonces, tx_hashes }))
.with(eq(CommitBlockArgs { address_to_nonce, tx_hashes }))
.returning(|_| Ok(()));

storage_writer
10 changes: 7 additions & 3 deletions crates/batcher/src/block_builder.rs
@@ -36,7 +36,7 @@ use tracing::{debug, error, info, trace};

use crate::papyrus_state::PapyrusReader;
use crate::transaction_executor::TransactionExecutorTrait;
use crate::transaction_provider::{TransactionProvider, TransactionProviderError};
use crate::transaction_provider::{NextTxs, TransactionProvider, TransactionProviderError};

#[derive(Debug, Error)]
pub enum BlockBuilderError {
@@ -142,14 +142,18 @@ impl BlockBuilderTrait for BlockBuilder {
async fn build_block(
&self,
deadline: tokio::time::Instant,
tx_provider: Box<dyn TransactionProvider>,
mut tx_provider: Box<dyn TransactionProvider>,
output_content_sender: tokio::sync::mpsc::UnboundedSender<Transaction>,
) -> BlockBuilderResult<BlockExecutionArtifacts> {
let mut block_is_full = false;
let mut execution_infos = IndexMap::new();
// TODO(yael 6/10/2024): delete the timeout condition once the executor has a timeout
while !block_is_full && tokio::time::Instant::now() < deadline {
let next_tx_chunk = tx_provider.get_txs(self.tx_chunk_size).await?;
let next_txs = tx_provider.get_txs(self.tx_chunk_size).await?;
let next_tx_chunk = match next_txs {
NextTxs::Txs(txs) => txs,
NextTxs::End => break,
};
debug!("Got {} transactions from the transaction provider.", next_tx_chunk.len());
if next_tx_chunk.is_empty() {
// TODO: Consider what is the best sleep duration.
47 changes: 44 additions & 3 deletions crates/batcher/src/block_builder_test.rs
@@ -14,7 +14,7 @@ use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender};
use crate::block_builder::{BlockBuilder, BlockBuilderTrait, BlockExecutionArtifacts};
use crate::test_utils::test_txs;
use crate::transaction_executor::MockTransactionExecutorTrait;
use crate::transaction_provider::MockTransactionProvider;
use crate::transaction_provider::{MockTransactionProvider, NextTxs};

const BLOCK_GENERATION_DEADLINE_SECS: u64 = 1;
const TX_CHANNEL_SIZE: usize = 50;
@@ -144,6 +144,26 @@ fn test_expectations_with_delay(
(mock_transaction_executor, mock_tx_provider, expected_block_artifacts)
}

fn stream_done_test_expectations(
input_txs: &[Transaction],
) -> (MockTransactionExecutorTrait, MockTransactionProvider, BlockExecutionArtifacts) {
let block_size = input_txs.len();
let input_txs_cloned = input_txs.to_vec();
let mut mock_transaction_executor = MockTransactionExecutorTrait::new();

mock_transaction_executor
.expect_add_txs_to_block()
.withf(move |blockifier_input| compare_tx_hashes(&input_txs_cloned, blockifier_input))
.return_once(move |_| (0..block_size).map(|_| Ok(execution_info())).collect());

let expected_block_artifacts =
set_close_block_expectations(&mut mock_transaction_executor, block_size);

let mock_tx_provider = mock_tx_provider_stream_done(input_txs.to_vec());

(mock_transaction_executor, mock_tx_provider, expected_block_artifacts)
}

// Fill the executor outputs with some non-default values to make sure the block_builder uses
// them.
fn block_builder_expected_output(execution_info_len: usize) -> BlockExecutionArtifacts {
@@ -180,7 +200,24 @@ fn mock_tx_provider_limited_calls(
.expect_get_txs()
.times(n_calls)
.with(eq(TX_CHUNK_SIZE))
.returning(move |_n_txs| Ok(input_chunks.remove(0)));
.returning(move |_n_txs| Ok(NextTxs::Txs(input_chunks.remove(0))));
mock_tx_provider
}

fn mock_tx_provider_stream_done(input_chunk: Vec<Transaction>) -> MockTransactionProvider {
let mut mock_tx_provider = MockTransactionProvider::new();
let mut seq = Sequence::new();
mock_tx_provider
.expect_get_txs()
.times(1)
.in_sequence(&mut seq)
.with(eq(TX_CHUNK_SIZE))
.returning(move |_n_txs| Ok(NextTxs::Txs(input_chunk.clone())));
mock_tx_provider
.expect_get_txs()
.times(1)
.in_sequence(&mut seq)
.returning(|_n_txs| Ok(NextTxs::End));
mock_tx_provider
}

@@ -198,7 +235,10 @@
}

fn add_limitless_empty_calls(mock_tx_provider: &mut MockTransactionProvider) {
mock_tx_provider.expect_get_txs().with(eq(TX_CHUNK_SIZE)).returning(|_n_txs| Ok(Vec::new()));
mock_tx_provider
.expect_get_txs()
.with(eq(TX_CHUNK_SIZE))
.returning(|_n_txs| Ok(NextTxs::Txs(Vec::new())));
}

fn compare_tx_hashes(input: &[Transaction], blockifier_input: &[BlockifierTransaction]) -> bool {
@@ -247,6 +287,7 @@ async fn run_build_block(
#[case::empty_block(0, vec![], empty_block_test_expectations())]
#[case::block_full(1, test_txs(0..3), block_full_test_expectations(&input_txs, expected_block_size))]
#[case::deadline_reached_after_first_chunk(3, test_txs(0..6), test_expectations_with_delay(&input_txs))]
#[case::stream_done(2, test_txs(0..2), stream_done_test_expectations(&input_txs))]
#[tokio::test]
async fn test_build_block(
#[case] expected_block_size: usize,
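The new stream_done case relies on mockall's Sequence to force the provider mock to yield a transaction chunk first and an end-of-stream marker second. A self-contained sketch of that pattern with simplified types (assuming the mockall crate; this is not the test code above):

use mockall::{automock, Sequence};

#[automock]
trait ChunkProvider {
    fn next_chunk(&mut self) -> Option<Vec<u32>>;
}

fn main() {
    let mut mock = MockChunkProvider::new();
    let mut seq = Sequence::new();

    // Expectations registered in the same sequence must be satisfied in this order.
    mock.expect_next_chunk()
        .times(1)
        .in_sequence(&mut seq)
        .returning(|| Some(vec![1, 2, 3]));
    mock.expect_next_chunk().times(1).in_sequence(&mut seq).returning(|| None);

    assert_eq!(mock.next_chunk(), Some(vec![1, 2, 3]));
    assert_eq!(mock.next_chunk(), None);
}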
30 changes: 27 additions & 3 deletions crates/batcher/src/transaction_provider.rs
@@ -11,10 +11,16 @@ pub enum TransactionProviderError {
MempoolError(#[from] MempoolClientError),
}

#[derive(Debug, PartialEq)]
pub enum NextTxs {
Txs(Vec<Transaction>),
End,
}

#[cfg_attr(test, automock)]
#[async_trait]
pub trait TransactionProvider: Send + Sync {
async fn get_txs(&self, n_txs: usize) -> Result<Vec<Transaction>, TransactionProviderError>;
async fn get_txs(&mut self, n_txs: usize) -> Result<NextTxs, TransactionProviderError>;
}

pub struct ProposeTransactionProvider {
@@ -23,8 +29,26 @@ pub struct ProposeTransactionProvider {

#[async_trait]
impl TransactionProvider for ProposeTransactionProvider {
async fn get_txs(&self, n_txs: usize) -> Result<Vec<Transaction>, TransactionProviderError> {
async fn get_txs(&mut self, n_txs: usize) -> Result<NextTxs, TransactionProviderError> {
// TODO: Get also L1 transactions.
Ok(self.mempool_client.get_txs(n_txs).await?)
Ok(NextTxs::Txs(self.mempool_client.get_txs(n_txs).await?))
}
}

pub struct ValidateTransactionProvider {
pub tx_receiver: tokio::sync::mpsc::Receiver<Transaction>,
}

#[async_trait]
impl TransactionProvider for ValidateTransactionProvider {
async fn get_txs(&mut self, n_txs: usize) -> Result<NextTxs, TransactionProviderError> {
let mut buffer = Vec::with_capacity(n_txs);
self.tx_receiver.recv_many(&mut buffer, n_txs).await;
// If the buffer is empty, it means that the stream was dropped, otherwise it would have
// been waiting for transactions.
if buffer.is_empty() {
return Ok(NextTxs::End);
}
Ok(NextTxs::Txs(buffer))
}
}
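The NextTxs enum lets a provider distinguish an empty poll from a finished stream. ValidateTransactionProvider reports End when recv_many returns nothing, which with tokio only happens once all senders have been dropped; otherwise the call waits for at least one transaction. A self-contained sketch of that behaviour with simplified stand-in types (assuming a tokio version that provides Receiver::recv_many):

use tokio::sync::mpsc;

#[derive(Debug, PartialEq)]
enum NextItems {
    Items(Vec<u32>),
    End,
}

async fn get_items(rx: &mut mpsc::Receiver<u32>, n: usize) -> NextItems {
    let mut buffer = Vec::with_capacity(n);
    rx.recv_many(&mut buffer, n).await;
    // An empty buffer means the channel is closed; recv_many would otherwise wait.
    if buffer.is_empty() { NextItems::End } else { NextItems::Items(buffer) }
}

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::channel(8);
    tx.send(1).await.unwrap();
    tx.send(2).await.unwrap();

    // Both queued items come back in a single chunk.
    assert_eq!(get_items(&mut rx, 4).await, NextItems::Items(vec![1, 2]));

    // Dropping the sender closes the stream, so the next poll reports End.
    drop(tx);
    assert_eq!(get_items(&mut rx, 4).await, NextItems::End);
}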
1 change: 0 additions & 1 deletion crates/batcher_types/Cargo.toml
@@ -5,7 +5,6 @@ edition.workspace = true
license.workspace = true
repository.workspace = true


[lints]
workspace = true

(Remaining file diffs not shown.)
