tzahi/Merge main-v0.13.2 into main (#153)
* fix: global file changes trigger specific package CIs (#77)

Signed-off-by: Dori Medini <dori@starkware.co>

* chore(ci): allow warning during cargo doc (#127)

* chore: fix some clippy doc issues (#122)

* chore(ci): fix package selection (#123)

* chore(ci): add triggers to papyrus ci (#121)

* ci: add triggers for papyrus CI (#135)

* chore(ci): fix merge paths CI (#116)

Signed-off-by: Dori Medini <dori@starkware.co>

* refactor: move test files to dedicated directory (#105)

* chore: move (workspace) doc test to main (workspace) CI (#106)

Signed-off-by: Dori Medini <dori@starkware.co>

* Merge remote-tracking branch 'origin/main-v0.13.2' into tzahi/merge-main-v0.13.2-into-main

* No conflicts in the main-v0.13.2 -> main merge; this commit is for any changes needed to pass the CI.

Co-Authored-By: dorimedini-starkware <dori@starkware.co>
Co-Authored-By: dan-starkware <56217775+dan-starkware@users.noreply.github.com>
Co-Authored-By: aner-starkware <147302140+aner-starkware@users.noreply.github.com>
4 people authored Jul 28, 2024
1 parent 5e01e6d commit a65d3f9
Showing 15 changed files with 83 additions and 46 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/committer_ci.yml
@@ -42,7 +42,7 @@ jobs:
credentials_json: ${{ secrets.COMMITER_PRODUCTS_EXT_WRITER_JSON }}
- uses: 'google-github-actions/setup-gcloud@v2'
- run: echo "BENCH_INPUT_FILES_PREFIX=$(cat ./crates/committer_cli/src/tests/flow_test_files_prefix)" >> $GITHUB_ENV
- run: gcloud storage cp -r gs://committer-testing-artifacts/$BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/benches
- run: gcloud storage cp -r gs://committer-testing-artifacts/$BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/test_inputs
- run: cargo test -p committer_cli --release -- --include-ignored test_regression

benchmarking:
@@ -83,13 +83,13 @@ jobs:
# Input files didn't change.
- if: env.OLD_BENCH_INPUT_FILES_PREFIX == env.NEW_BENCH_INPUT_FILES_PREFIX
run: |
mv ./crates/committer_cli/benches/tree_flow_inputs.json_bu ./crates/committer_cli/benches/tree_flow_inputs.json
mv ./crates/committer_cli/benches/committer_flow_inputs.json_bu ./crates/committer_cli/benches/committer_flow_inputs.json
mv ./crates/committer_cli/benches/tree_flow_inputs.json_bu ./crates/committer_cli/test_inputs/tree_flow_inputs.json
mv ./crates/committer_cli/benches/committer_flow_inputs.json_bu ./crates/committer_cli/test_inputs/committer_flow_inputs.json
# Input files did change, download new inputs.
- if: env.OLD_BENCH_INPUT_FILES_PREFIX != env.NEW_BENCH_INPUT_FILES_PREFIX
run: |
gcloud storage cp -r gs://committer-testing-artifacts/$NEW_BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/benches
gcloud storage cp -r gs://committer-testing-artifacts/$NEW_BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/test_inputs
# Benchmark the new code, splitting the benchmarks, and prepare the results for posting a comment.
- run: bash ./crates/committer_cli/benches/bench_split_and_prepare_post.sh benchmarks_list.txt bench_new.txt
13 changes: 13 additions & 0 deletions .github/workflows/main.yml
@@ -77,6 +77,19 @@ jobs:

- run: scripts/clippy.sh

doc:
runs-on: ubuntu-latest
# env:
# RUSTDOCFLAGS: "-D warnings"
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- uses: Noelware/setup-protoc@1.1.0
with:
version: ${{env.PROTOC_VERSION}}
- run: cargo doc --workspace -r --document-private-items --no-deps

run-tests:
runs-on: ubuntu-20.04
steps:
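The new `doc` job ships with its `RUSTDOCFLAGS: "-D warnings"` env block commented out, matching #127 ("allow warning during cargo doc"). A minimal sketch for reproducing the job locally, assuming only that `cargo` is on PATH (the wrapper program itself is illustrative, not part of this diff):

```rust
// Run the same rustdoc invocation as the CI `doc` job above.
use std::process::Command;

fn main() {
    let status = Command::new("cargo")
        .args(["doc", "--workspace", "-r", "--document-private-items", "--no-deps"])
        // Uncommenting the next line restores the stricter behavior that the
        // job's commented-out env block would enable:
        // .env("RUSTDOCFLAGS", "-D warnings")
        .status()
        .expect("failed to spawn cargo doc");
    assert!(status.success(), "cargo doc reported errors");
}
```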
1 change: 1 addition & 0 deletions .github/workflows/merge_paths_ci.yml
@@ -28,6 +28,7 @@ on:
- 'scripts/merge_paths_test.py'
- 'scripts/merge_status.py'

jobs:
merge-paths-test:
runs-on: ubuntu-20.04
steps:
31 changes: 16 additions & 15 deletions .github/workflows/papyrus_ci.yml
@@ -4,7 +4,14 @@ on:
push:
branches: [main]
paths:
- '.github/workflows/papyrus_ci.yml'
- 'Dockerfile'
- 'papyrus_utilities.Dockerfile'
- 'Cargo.toml'
- 'Cargo.lock'
- 'crates/papyrus**/**'
- 'crates/sequencing/**'
- 'crates/starknet_client/**'

pull_request:
types:
@@ -14,7 +21,14 @@ on:
- auto_merge_enabled
- edited # for when the PR title is edited
paths:
- '.github/workflows/papyrus_ci.yml'
- 'Dockerfile'
- 'papyrus_utilities.Dockerfile'
- 'Cargo.toml'
- 'Cargo.lock'
- 'crates/papyrus**/**'
- 'crates/sequencing/**'
- 'crates/starknet_client/**'

merge_group:
types: [checks_requested]
@@ -35,7 +49,7 @@ jobs:
- name: Build node
run: |
mkdir data
cargo build -r
cargo build -r -p papyrus_node
- name: Run executable
run: >
@@ -54,7 +68,7 @@ jobs:
- name: Build node
run: |
mkdir data
cargo build -r --no-default-features
cargo build -r -p papyrus_node --no-default-features
- name: Run executable
run: >
@@ -91,19 +105,6 @@ jobs:
env:
SEED: 0
doc:
runs-on: ubuntu-latest
env:
RUSTDOCFLAGS: "-D warnings"
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- uses: Noelware/setup-protoc@1.1.0
with:
version: ${{env.PROTOC_VERSION}}
- run: cargo doc --workspace -r --document-private-items --no-deps

codecov:
runs-on: ubuntu-latest
steps:
12 changes: 8 additions & 4 deletions .github/workflows/papyrus_nightly-tests.yml
@@ -45,7 +45,7 @@ jobs:
- run: brew install protobuf@$PROTOC_VERSION

- name: Build node
run: cargo build -r
run: cargo build -r -p papyrus_node

- name: Run executable
run: >
@@ -64,7 +64,7 @@ jobs:
- run: brew install protobuf@$PROTOC_VERSION

- run: |
cargo test -r
cargo test -r -p papyrus_node
env:
SEED: 0
@@ -85,10 +85,14 @@ jobs:
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- run: >
cargo test -r --test '*' -- --include-ignored --skip test_gw_integration_testnet;
cargo test -r
--test latency_histogram
--test gateway_integration_test
--test feeder_gateway_integration_test
-- --include-ignored --skip test_gw_integration_testnet;
cargo run -r -p papyrus_node --bin central_source_integration_test --features="futures-util tokio-stream"
# TODO(dvir): make this run only if the path 'crates/papyrus_storage/src/db/**' (same path as in the CI) was changed on the
# last day and increase the number of repetitions.
random-table-test:
runs-on: ubuntu-latest
@@ -46,7 +46,7 @@ pub(crate) trait TreeHashFunction<L: Leaf> {
fn compute_node_hash(node_data: &NodeData<L>) -> HashOutput;

/// The default implementation for internal nodes is based on the following reference:
/// https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction
/// <https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction>
fn compute_node_hash_with_inner_hash_function<H: HashFunction>(
node_data: &NodeData<L>,
) -> HashOutput {
@@ -76,7 +76,7 @@ impl TreeHashFunctionImpl {

/// Implementation of TreeHashFunction for contracts trie.
/// The implementation is based on the following reference:
/// https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction
/// <https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction>
impl TreeHashFunction<ContractState> for TreeHashFunctionImpl {
fn compute_leaf_hash(contract_state: &ContractState) -> HashOutput {
HashOutput(
@@ -100,7 +100,7 @@ impl TreeHashFunction<ContractState> for TreeHashFunctionImpl {

/// Implementation of TreeHashFunction for the classes trie.
/// The implementation is based on the following reference:
/// https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction
/// <https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction>
impl TreeHashFunction<CompiledClassHash> for TreeHashFunctionImpl {
fn compute_leaf_hash(compiled_class_hash: &CompiledClassHash) -> HashOutput {
let contract_class_leaf_version: Felt = Felt::from_hex(Self::CONTRACT_CLASS_LEAF_V0)
@@ -119,7 +119,7 @@ impl TreeHashFunction<CompiledClassHash> for TreeHashFunctionImpl {

/// Implementation of TreeHashFunction for the storage trie.
/// The implementation is based on the following reference:
/// https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction
/// <https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction>
impl TreeHashFunction<StarknetStorageValue> for TreeHashFunctionImpl {
fn compute_leaf_hash(storage_value: &StarknetStorageValue) -> HashOutput {
HashOutput(storage_value.0)
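The angle brackets added around the reference URLs are not cosmetic: rustdoc's `bare_urls` lint warns on plain URLs in doc comments, while `<...>` renders them as actual links, which keeps the new `doc` job green. A minimal sketch of the distinction (the stub function is hypothetical):

```rust
// With rustdoc's bare_urls lint active, a plain URL in a doc comment warns:
// /// https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction

/// <https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction>
fn documented_stub() {}

fn main() {
    documented_stub();
}
```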
4 changes: 2 additions & 2 deletions crates/committer_cli/benches/committer_bench.rs
@@ -19,8 +19,8 @@ use committer_cli::tests::utils::parse_from_python::TreeFlowInput;
use criterion::{criterion_group, criterion_main, Criterion};

const CONCURRENCY_MODE: bool = true;
const SINGLE_TREE_FLOW_INPUT: &str = include_str!("tree_flow_inputs.json");
const FLOW_TEST_INPUT: &str = include_str!("committer_flow_inputs.json");
const SINGLE_TREE_FLOW_INPUT: &str = include_str!("../test_inputs/tree_flow_inputs.json");
const FLOW_TEST_INPUT: &str = include_str!("../test_inputs/committer_flow_inputs.json");
const OUTPUT_PATH: &str = "benchmark_output.txt";

pub fn single_tree_flow_benchmark(criterion: &mut Criterion) {
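The `../test_inputs/` prefix follows from how `include_str!` works: paths resolve relative to the file invoking the macro, not the crate root, so a bench file under `benches/` reaches the relocated inputs one directory up. A minimal sketch, assuming the JSON file exists at that path:

```rust
// include_str! embeds the file's contents at compile time, resolved relative
// to this source file; a wrong path is a build error rather than a runtime one.
const SINGLE_TREE_FLOW_INPUT: &str = include_str!("../test_inputs/tree_flow_inputs.json");

fn main() {
    println!("input is {} bytes", SINGLE_TREE_FLOW_INPUT.len());
}
```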
1 change: 0 additions & 1 deletion crates/committer_cli/benches/committer_flow_inputs.json

This file was deleted.

4 changes: 2 additions & 2 deletions crates/committer_cli/src/parse_input/raw_input.rs
@@ -7,7 +7,7 @@ type RawFelt = [u8; 32];
#[derive(Deserialize, Debug)]
/// Input to the committer.
pub(crate) struct RawInput {
/// Storage. Will be casted to HashMap<vec<u8>, Vec<u8>> to simulate DB access.
/// Storage. Will be casted to `HashMap<Vec<u8>, Vec<u8>>` to simulate DB access.
pub storage: Vec<RawStorageEntry>,
pub state_diff: RawStateDiff,
pub contracts_trie_root_hash: RawFelt,
@@ -30,7 +30,7 @@ pub(crate) struct RawConfigImpl {

#[derive(Deserialize_repr, Debug, Default, Serialize)]
#[repr(usize)]
/// Describes a log level https://docs.python.org/3/library/logging.html#logging-levels
/// Describes a log level <https://docs.python.org/3/library/logging.html#logging-levels>
pub(crate) enum PythonLogLevel {
NotSet = 0,
Info = 20,
8 changes: 4 additions & 4 deletions crates/committer_cli/src/tests/regression_tests.rs
@@ -16,8 +16,8 @@ use crate::tests::utils::parse_from_python::TreeFlowInput;
// 2. Fix the max time threshold to be the expected time for the benchmark test.
const MAX_TIME_FOR_SINGLE_TREE_BECHMARK_TEST: f64 = 5.0;
const MAX_TIME_FOR_COMMITTER_FLOW_BECHMARK_TEST: f64 = 5.0;
const SINGLE_TREE_FLOW_INPUT: &str = include_str!("../../benches/tree_flow_inputs.json");
const FLOW_TEST_INPUT: &str = include_str!("../../benches/committer_flow_inputs.json");
const SINGLE_TREE_FLOW_INPUT: &str = include_str!("../../test_inputs/tree_flow_inputs.json");
const FLOW_TEST_INPUT: &str = include_str!("../../test_inputs/committer_flow_inputs.json");
const OUTPUT_PATH: &str = "benchmark_output.txt";
const EXPECTED_NUMBER_OF_FILES: usize = 100;

@@ -163,10 +163,10 @@ pub async fn test_regression_committer_flow() {
#[tokio::test(flavor = "multi_thread")]
pub async fn test_regression_committer_all_files() {
assert_eq!(
fs::read_dir("./benches/regression_files").unwrap().count(),
fs::read_dir("./test_inputs/regression_files").unwrap().count(),
EXPECTED_NUMBER_OF_FILES
);
let dir_path = fs::read_dir("./benches/regression_files").unwrap();
let dir_path = fs::read_dir("./test_inputs/regression_files").unwrap();
for file_path in dir_path {
// TODO(Aner, 23/07/24): multi-thread the test.
test_single_committer_flow(
@@ -0,0 +1 @@
This file is a placeholder for inputs to committer_flow regression test and benchmark.
9 changes: 4 additions & 5 deletions crates/sequencing/papyrus_consensus/src/state_machine.rs
@@ -19,17 +19,16 @@ use crate::types::Round;
pub enum StateMachineEvent {
/// StartRound is effectively 2 questions:
/// 1. Is the local node the proposer for this round?
/// 2. If so, what value should be proposed?
/// While waiting for the response to this event, the state machine will buffer all other
/// events.
/// 2. If so, what value should be proposed? While waiting for the response to this event, the
/// state machine will buffer all other events.
///
/// How should the caller handle this event?
/// 1. If the local node is not the proposer, the caller responds with `None` as the block
/// hash.
/// 2. If the local node is the proposer and a block hash was supplied by the state machine,
/// the caller responds with the supplied block hash.
/// the caller responds with the supplied block hash.
/// 3. If the local node is the proposer and no block hash was supplied by the state machine,
/// the caller must find/build a block to respond with.
/// the caller must find/build a block to respond with.
StartRound(Option<BlockHash>, Round),
/// Consensus message, can be both sent from and to the state machine.
Proposal(BlockHash, Round),
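The re-wrapped doc comment enumerates three caller responses to `StartRound`. A hedged sketch of that dispatch, using stand-in types rather than the crate's real `BlockHash` and `Round` (and a hypothetical `find_or_build_block` helper):

```rust
// Stand-ins for illustration; not the crate's actual definitions.
type BlockHash = u64;
type Round = u32;

enum StateMachineEvent {
    StartRound(Option<BlockHash>, Round),
}

// Hypothetical helper for case 3: the caller finds/builds a block itself.
fn find_or_build_block() -> BlockHash {
    42
}

fn respond(event: StateMachineEvent, is_proposer: bool) -> Option<BlockHash> {
    match event {
        // 1. Not the proposer: respond with `None` as the block hash.
        StateMachineEvent::StartRound(_, _) if !is_proposer => None,
        // 2. Proposer, and the state machine supplied a hash: echo it back.
        StateMachineEvent::StartRound(Some(hash), _) => Some(hash),
        // 3. Proposer with no supplied hash: find/build a block to respond with.
        StateMachineEvent::StartRound(None, _) => Some(find_or_build_block()),
    }
}

fn main() {
    assert_eq!(respond(StateMachineEvent::StartRound(None, 0), false), None);
    assert_eq!(respond(StateMachineEvent::StartRound(Some(7), 0), true), Some(7));
}
```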
6 changes: 3 additions & 3 deletions crates/starknet_api/src/crypto/patricia_hash.rs
@@ -10,10 +10,10 @@
//!
//! The edges coming out of an internal node with a key `K` are:
//! - If there are input keys that start with 'K0...' and 'K1...', then two edges come out, marked
//! with '0' and '1' bits.
//! with '0' and '1' bits.
//! - Otherwise, a single edge mark with 'Z' is coming out. 'Z' is the longest string, such that all
//! the input keys that start with 'K...' start with 'KZ...' as well. Note, the order of the input
//! keys in this implementation forces 'Z' to be a zeros string.
//! the input keys that start with 'K...' start with 'KZ...' as well. Note, the order of the input
//! keys in this implementation forces 'Z' to be a zeros string.
//!
//! Hash of a node depends on the number of edges coming out of it:
//! - A leaf: The hash is the input value of its key.
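The re-indented bullets describe when one edge versus two leave an internal node. A small sketch of the underlying computation, using `&str` bit-strings in place of the crate's actual key type:

```rust
// If every key below prefix K shares a longer common prefix KZ, a single edge
// marked Z leaves the node; otherwise keys split on the next bit and two
// edges ('0' and '1') leave it.
fn longest_common_prefix<'a>(keys: &[&'a str]) -> &'a str {
    let first = keys[0];
    let mut len = first.len();
    for key in &keys[1..] {
        len = len.min(first.bytes().zip(key.bytes()).take_while(|(a, b)| a == b).count());
    }
    &first[..len]
}

fn main() {
    // "110" and "111" diverge right after "11": two edges, '0' and '1'.
    assert_eq!(longest_common_prefix(&["110", "111"]), "11");
    // "1000" and "1001" share "100": a single edge, with Z = "00" below '1'.
    assert_eq!(longest_common_prefix(&["1000", "1001"]), "100");
}
```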
23 changes: 21 additions & 2 deletions scripts/run_tests.py
@@ -19,6 +19,10 @@
DEPENDENCY_PATTERN = r"([a-zA-Z0-9_]+) [^(]* \(([^)]+)\)"


# Set of files which - if changed - should trigger tests for all packages.
ALL_TEST_TRIGGERS: Set[str] = {"Cargo.toml", "Cargo.lock"}


def get_workspace_tree() -> Dict[str, str]:
tree = dict()
res = subprocess.check_output("cargo tree --depth 0".split()).decode("utf-8").splitlines()
@@ -65,6 +69,12 @@ def get_package_dependencies(package_name: str) -> Set[str]:
return deps


def packages_to_test_due_to_global_changes(files: List[str]) -> Set[str]:
if len(set(files).intersection(ALL_TEST_TRIGGERS)) > 0:
return set(get_workspace_tree().keys())
return set()


def run_test(changes_only: bool, commit_id: Optional[str], concurrency: bool):
local_changes = get_local_changes(".", commit_id=commit_id)
modified_packages = get_modified_packages(local_changes)
@@ -73,17 +83,26 @@ def run_test(changes_only: bool, commit_id: Optional[str], concurrency: bool):
if changes_only:
for p in modified_packages:
deps = get_package_dependencies(p)
print(f"Running tests for {deps}")
tested_packages.update(deps)
if len(args) == 0:
print(f"Running tests for {tested_packages} (due to modifications in {modified_packages}).")
# Add global-triggered packages.
extra_packages = packages_to_test_due_to_global_changes(files=local_changes)
print(f"Running tests for global-triggered packages {extra_packages}")
tested_packages.update(extra_packages)
if len(tested_packages) == 0:
print("No changes detected.")
return

for package in tested_packages:
args.extend(["--package", package])

# If tested_packages is empty (i.e. changes_only is False), all packages will be tested (no
# args).
cmd = ["cargo", "test"] + args

# TODO: Less specific handling of active feature combinations in tests (which combos should be
# tested and which shouldn't?).
# If blockifier is to be tested, add the concurrency flag if requested.
if concurrency and "blockifier" in tested_packages:
cmd.extend(["--features", "concurrency"])

Expand Down
