style: Fix many style issues
- Adhere to agreed-upon maximum line length
- Use `.map()` or `.map_err()` instead of re-implementing those methods (see the sketch below)
- Attach existing file `transaction_is_valid` to the module tree
- De-duplicate copy-and-pasted code
- Use available macros to their full potential
- Use the trait's canonical function order instead of an arbitrary order
- Avoid creating inlinable use-once variables (where sensible)
- Use imports canonically
- De-duplicate imports in test modules
- Fix spelling mistakes
- Align “BEFORE” and “AFTER” comments

changelog: ignore
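A minimal sketch of the rewrite the `.map()`/`.map_err()` bullet above describes. The `ParseError` type and `parse_port*` functions are invented for this illustration and are not part of the commit:

    // Hypothetical example; none of these names appear in the diff below.
    #[derive(Debug)]
    struct ParseError(String);

    // BEFORE: re-implements `.map_err()` with an explicit `match`.
    fn parse_port_verbose(input: &str) -> Result<u16, ParseError> {
        match input.trim().parse::<u16>() {
            Ok(port) => Ok(port),
            Err(e) => Err(ParseError(e.to_string())),
        }
    }

    // AFTER: the same logic expressed through the combinator.
    fn parse_port(input: &str) -> Result<u16, ParseError> {
        input
            .trim()
            .parse::<u16>()
            .map_err(|e| ParseError(e.to_string()))
    }

    fn main() {
        assert_eq!(parse_port_verbose("8080").unwrap(), 8080);
        assert_eq!(parse_port("8080").unwrap(), 8080);
        assert!(parse_port("not a port").is_err());
    }

The same idea shows up in the diff of `proof_collection.rs` below, where four copy-pasted `if let Ok(output) = … { … } else { false }` blocks are replaced by a single `witness_halts_gracefully` helper built on `.map(…).unwrap_or(false)`.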
jan-ferdinand committed Nov 20, 2024
1 parent 9c21222 commit 5c7e1cd
Showing 23 changed files with 155 additions and 157 deletions.
4 changes: 2 additions & 2 deletions src/bin/dashboard_src/dashboard_app.rs
@@ -3,19 +3,19 @@ use std::cell::RefCell;
use std::cell::RefMut;
use std::collections::HashMap;
use std::error::Error;
use std::io;
use std::io::Stdout;
use std::io::{self};
use std::net::SocketAddr;
use std::rc::Rc;
use std::sync::Arc;
use std::time::Duration;

use crossterm::event;
use crossterm::event::DisableMouseCapture;
use crossterm::event::EnableMouseCapture;
use crossterm::event::Event;
use crossterm::event::KeyCode;
use crossterm::event::KeyEventKind;
use crossterm::event::{self};
use crossterm::execute;
use crossterm::terminal::disable_raw_mode;
use crossterm::terminal::enable_raw_mode;
3 changes: 1 addition & 2 deletions src/bin/triton-vm-prover.rs
@@ -1,6 +1,5 @@
use std::io::BufRead;
use std::io::Write;
use std::io::{self};

use tasm_lib::triton_vm::prelude::Program;
use tasm_lib::triton_vm::proof::Claim;
@@ -18,7 +17,7 @@ fn main() {
// pass it with ThreadPriority::CrossPlatform(x).
set_current_thread_priority(ThreadPriority::Min).unwrap();

let stdin = io::stdin();
let stdin = std::io::stdin();
let mut iterator = stdin.lock().lines();
let claim: Claim = serde_json::from_str(&iterator.next().unwrap().unwrap()).unwrap();
let program: Program = serde_json::from_str(&iterator.next().unwrap().unwrap()).unwrap();
74 changes: 49 additions & 25 deletions src/models/blockchain/block/mod.rs
@@ -436,7 +436,7 @@ impl Block {
/// kernels. The net result is that broadcasting transactions on other
/// networks invalidates the lock script proofs.
pub(crate) fn premine_sender_randomness(network: Network) -> Digest {
Digest::new([bfe!(network as u64), bfe!(0), bfe!(0), bfe!(0), bfe!(0)])
Digest::new(bfe_array![network as u64, 0, 0, 0, 0])
}

fn premine_distribution() -> Vec<(ReceivingAddress, NeptuneCoins)> {
@@ -587,7 +587,8 @@ impl Block {
// b) Block proof is valid
// c) Max block size is not exceeded
// 2. The transaction is valid.
// a) Verify that MS removal records are valid, done against previous `mutator_set_accumulator`,
// a) Verify that MS removal records are valid, done against previous
// `mutator_set_accumulator`,
// b) Verify that all removal records have unique index sets
// c) Verify that the mutator set update induced by the block sends
// the old mutator set accumulator to the new one.
@@ -626,11 +627,13 @@ impl Block {
> self.kernel.header.timestamp
{
warn!(
"Block's timestamp ({}) should be greater than or equal to that of previous block ({}) plus minimum block time ({}) \nprevious <= current ?? {}",
"Block's timestamp ({}) should be greater than or equal to that of previous block \
({}) plus minimum block time ({}) \nprevious <= current ?? {}",
self.kernel.header.timestamp,
previous_block.kernel.header.timestamp,
minimum_block_time,
previous_block.kernel.header.timestamp + minimum_block_time <= self.kernel.header.timestamp
previous_block.kernel.header.timestamp + minimum_block_time
<= self.kernel.header.timestamp
);
return false;
}
@@ -645,15 +648,21 @@ impl Block {
);
if self.kernel.header.difficulty != expected_difficulty {
warn!(
"Value for new difficulty is incorrect. actual: {}, expected: {expected_difficulty}",
"Value for new difficulty is incorrect. \
actual: {}, expected: {expected_difficulty}",
self.kernel.header.difficulty,
);
return false;
}
let expected_cumulative_proof_of_work =
previous_block.header().cumulative_proof_of_work + previous_block.header().difficulty;
if self.header().cumulative_proof_of_work != expected_cumulative_proof_of_work {
warn!("Block's cumulative proof-of-work number does not match with expectation.\n\nBlock's pow: {}\nexpectation: {}", self.header().cumulative_proof_of_work, expected_cumulative_proof_of_work);
warn!(
"Block's cumulative proof-of-work number does not match with expectation.\n\n\
Block's pow: {}\nexpectation: {}",
self.header().cumulative_proof_of_work,
expected_cumulative_proof_of_work
);
return false;
}

@@ -671,7 +680,10 @@ impl Block {
// 1.a) Verify appendix contains required claims
for required_claim in BlockAppendix::consensus_claims(self.body()) {
if !self.appendix().contains(&required_claim) {
warn!("Block appendix does not contain required claim.\nRequired claim: {required_claim:?}");
warn!(
"Block appendix does not contain required claim.\n\
Required claim: {required_claim:?}"
);
return false;
}
}
@@ -756,7 +768,7 @@ impl Block {
return false;
}

// 2.d) verify that the transaction timestamp is less than or equal to the block's timestamp.
// 2.d) verify that the transaction timestamp is less than or equal to the block's timestamp
if self.kernel.body.transaction_kernel.timestamp > self.kernel.header.timestamp {
warn!(
"Transaction timestamp ({}) is is larger than that of block ({})",
@@ -771,7 +783,10 @@ impl Block {
let coinbase = self.kernel.body.transaction_kernel.coinbase;
if let Some(coinbase) = coinbase {
if coinbase > block_subsidy {
warn!("Coinbase exceeds block subsidy. coinbase: {coinbase}; block subsidy: {block_subsidy}.");
warn!(
"Coinbase exceeds block subsidy. coinbase: {coinbase}; \
block subsidy: {block_subsidy}."
);
return false;
}
}
@@ -950,8 +965,6 @@ impl Block {

#[cfg(test)]
mod block_tests {
use std::collections::HashSet;

use rand::thread_rng;
use rand::Rng;
use rayon::iter::IntoParallelRefIterator;
@@ -980,16 +993,17 @@ mod block_tests {

#[test]
fn all_genesis_blocks_have_unique_mutator_set_hashes() {
let mut genesis_block_msa_digests: HashSet<Digest> = HashSet::default();

for network in Network::iter() {
assert!(genesis_block_msa_digests.insert(
Block::genesis_block(network)
.body()
.mutator_set_accumulator
.hash(),
), "All genesis blocks must have unique MSA digests, otherwise replay attacks are possible");
}
let mutator_set_hash = |network| {
Block::genesis_block(network)
.body()
.mutator_set_accumulator
.hash()
};

assert!(
Network::iter().map(mutator_set_hash).all_unique(),
"All genesis blocks must have unique MSA digests, else replay attacks are possible",
);
}

#[test]
@@ -1041,7 +1055,8 @@ mod block_tests {
assert_eq!(BFieldElement::MAX / u64::from(difficulty), elem.value());
}

// Verify that a difficulty of BFieldElement::MAX accepts all digests where the last BFieldElement is zero
// Verify that a difficulty of BFieldElement::MAX accepts all digests where the
// last BFieldElement is zero
let some_difficulty = Difficulty::new([1, u32::MAX, 0, 0, 0]);
let some_threshold_actual: Digest = some_difficulty.target();

@@ -1412,9 +1427,18 @@ mod block_tests {
block2.body().transaction_kernel.inputs.clone(),
block2.body().transaction_kernel.outputs.clone(),
);
mutator_set_update_guesser_fees.apply_to_accumulator_and_records(&mut ms, &mut mutator_set_update_tx.removals.iter_mut().collect_vec())
.expect("applying mutator set update derived from block 2 to mutator set from block 1 should work");
mutator_set_update_tx.apply_to_accumulator(&mut ms).expect("applying mutator set update derived from block 2 to mutator set from block 1 should work");

let reason = "applying mutator set update derived from block 2 \
to mutator set from block 1 should work";
mutator_set_update_guesser_fees
.apply_to_accumulator_and_records(
&mut ms,
&mut mutator_set_update_tx.removals.iter_mut().collect_vec(),
)
.expect(reason);
mutator_set_update_tx
.apply_to_accumulator(&mut ms)
.expect(reason);

assert_eq!(ms.hash(), block2.body().mutator_set_accumulator.hash());
}
1 change: 1 addition & 0 deletions src/models/blockchain/block/validity.rs
@@ -26,6 +26,7 @@ pub mod correct_mmr_update;
pub mod correct_mutator_set_update;
pub mod mmr_membership;
pub mod predecessor_is_valid;
pub mod transaction_is_valid;

/// The validity of a block, in the principal case, decomposes into these subclaims.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, GetSize, BFieldCodec)]
5 changes: 3 additions & 2 deletions src/models/blockchain/block/validity/transaction_is_valid.rs
@@ -29,7 +29,7 @@ use crate::models::blockchain::transaction::validity::single_proof::SingleProof;
use crate::models::blockchain::transaction::validity::tasm::claims::generate_single_proof_claim::GenerateSingleProofClaim;
use crate::models::blockchain::transaction::TransactionProof;
use crate::models::proof_abstractions::mast_hash::MastHash;
use crate::models::proof_abstractions::tasm::builtins::{self as tasmlib};
use crate::models::proof_abstractions::tasm::builtins as tasmlib;
use crate::models::proof_abstractions::tasm::program::ConsensusProgram;
use crate::models::proof_abstractions::SecretWitness;

@@ -46,7 +46,7 @@ impl From<BlockPrimitiveWitness> for TransactionIsValidWitness {
let block_body = block_primitive_witness.body();
let mast_path_txk = block_body.mast_path(BlockBodyField::TransactionKernel);
let TransactionProof::SingleProof(single_proof) =
&block_primitive_witness.transaction.proof
&block_primitive_witness.transaction().proof
else {
panic!("cannot make a block whose transaction is not supported by a single proof");
};
@@ -101,6 +101,7 @@ impl SecretWitness for TransactionIsValidWitness {
pub(crate) struct TransactionIsValid;

impl TransactionIsValid {
#[expect(unused)] // todo: still needed?
pub(crate) fn claim(block_body_mast_hash: Digest) -> Claim {
let input = block_body_mast_hash.reversed().values().to_vec();

@@ -343,7 +343,7 @@ impl ConsensusProgram for CollectTypeScripts {
recurse

// BEFORE: _ *coin[j] *type_script_hashes
// AFTER: _ *coin[j] *
// AFTER: _ *coin[j] *
{push_digest_to_list}:
dup 1
// _ *coin[j] *type_script_hashes *coin[j]
67 changes: 20 additions & 47 deletions src/models/blockchain/transaction/validity/proof_collection.rs
@@ -69,49 +69,15 @@ impl ProofCollection {
)
}
pub fn can_produce(primitive_witness: &PrimitiveWitness) -> bool {
let (
removal_records_integrity_witness,
collect_lock_scripts_witness,
kernel_to_outputs_witness,
collect_type_scripts_witness,
) = Self::extract_specific_witnesses(primitive_witness);

// verify graceful halts
let removal_records_integrity_halts = if let Ok(output) = RemovalRecordsIntegrity.run_rust(
&removal_records_integrity_witness.standard_input(),
removal_records_integrity_witness.nondeterminism(),
) {
output == removal_records_integrity_witness.output()
} else {
false
};

let collect_lock_scripts_halts = if let Ok(output) = CollectLockScripts.run_rust(
&collect_lock_scripts_witness.standard_input(),
collect_lock_scripts_witness.nondeterminism(),
) {
output == collect_lock_scripts_witness.output()
} else {
false
};

let kernel_to_outputs_halts = if let Ok(output) = KernelToOutputs.run_rust(
&kernel_to_outputs_witness.standard_input(),
kernel_to_outputs_witness.nondeterminism(),
) {
output == kernel_to_outputs_witness.output()
} else {
false
};

let collect_type_scripts_halts = if let Ok(output) = CollectTypeScripts.run_rust(
&collect_type_scripts_witness.standard_input(),
collect_type_scripts_witness.nondeterminism(),
) {
output == collect_type_scripts_witness.output()
} else {
false
};
fn witness_halts_gracefully(
program: impl ConsensusProgram,
witness: impl SecretWitness,
) -> bool {
program
.run_rust(&witness.standard_input(), witness.nondeterminism())
.map(|output| output == witness.output())
.unwrap_or(false)
}

let txk_mast_hash = primitive_witness.kernel.mast_hash();
let txk_mast_hash_as_input = PublicInput::new(txk_mast_hash.reversed().values().to_vec());
@@ -127,10 +93,17 @@ impl ProofCollection {
.iter()
.all(|ts| ts.halts_gracefully(txk_mast_hash, salted_inputs_hash, salted_outputs_hash));

removal_records_integrity_halts
|| collect_lock_scripts_halts
|| kernel_to_outputs_halts
|| collect_type_scripts_halts
let (
removal_records_integrity_witness,
collect_lock_scripts_witness,
kernel_to_outputs_witness,
collect_type_scripts_witness,
) = Self::extract_specific_witnesses(primitive_witness);

witness_halts_gracefully(RemovalRecordsIntegrity, removal_records_integrity_witness)
|| witness_halts_gracefully(CollectLockScripts, collect_lock_scripts_witness)
|| witness_halts_gracefully(KernelToOutputs, kernel_to_outputs_witness)
|| witness_halts_gracefully(CollectTypeScripts, collect_type_scripts_witness)
|| all_lock_scripts_halt
|| all_type_scripts_halt
}
16 changes: 8 additions & 8 deletions src/models/blockchain/transaction/validity/single_proof.rs
@@ -307,13 +307,6 @@ impl SingleProof {
}

impl ConsensusProgram for SingleProof {
/// Get the program hash digest.
fn hash(&self) -> Digest {
static HASH: OnceLock<Digest> = OnceLock::new();

*HASH.get_or_init(|| self.program().hash())
}

fn source(&self) {
let stark: Stark = Stark::default();
let own_program_digest: Digest = tasmlib::own_program_digest();
@@ -850,6 +843,13 @@ impl ConsensusProgram for SingleProof {
{&library.all_imports()}
}
}

/// Get the program hash digest.
fn hash(&self) -> Digest {
static HASH: OnceLock<Digest> = OnceLock::new();

*HASH.get_or_init(|| self.program().hash())
}
}

#[cfg(test)]
@@ -878,7 +878,7 @@ mod test {
#[tokio::test]
async fn invalid_discriminant_crashes_execution() {
let pub_input = PublicInput::new(bfe_vec![0, 0, 0, 0, 0]);
for illegal_discriminant_value in [bfe!(-1), bfe!(3), bfe!(4), bfe!(1u64 << 40)] {
for illegal_discriminant_value in bfe_array![-1, 3, 4, 1 << 40] {
let init_ram: HashMap<_, _> = [(
FIRST_NON_DETERMINISTICALLY_INITIALIZED_MEMORY_ADDRESS,
illegal_discriminant_value,