From a55bfb33bb5901a0ac6f0428eae0d7686592c972 Mon Sep 17 00:00:00 2001
From: Gilad Chase
Date: Tue, 5 Nov 2024 19:56:33 +0200
Subject: [PATCH] chore: add lints to papyrus_storage

Lior banned `as` repo-wide, unless absolutely necessary. Notable
changes are listed below; the rest are either `From`/`TryFrom`
conversions or usize -> u64 casts:
1. storage_benchmark: the median values are u128, not usize, so the
   `Entry.value` field now uses u128 and the casts are dropped.
2. db_stats: two usages need `as f64`, so exceptions were added; the
   last usage now computes the power with `f64::powi`, so there is no
   need to go through `u32::pow` and cast.
3. serializers: the macro is instantiated for multiple types, so an
   exception was added.
4. utils.rs: the metrics need `as f64`, so an exception was added.
---
 crates/papyrus_storage/Cargo.toml                  |  6 ++---
 .../src/bin/storage_benchmark.rs                   |  8 +++---
 crates/papyrus_storage/src/db/db_stats.rs          |  6 +++--
 .../src/mmap_file/mmap_file_test.rs                | 26 +++++++++----------
 crates/papyrus_storage/src/mmap_file/mod.rs        |  3 ++-
 .../src/serialization/serializers.rs               |  5 ++--
 .../papyrus_storage/src/state/state_test.rs        |  4 +--
 crates/papyrus_storage/src/utils.rs                | 11 ++++++--
 crates/papyrus_storage/src/utils_test.rs           |  4 +--
 9 files changed, 41 insertions(+), 32 deletions(-)

diff --git a/crates/papyrus_storage/Cargo.toml b/crates/papyrus_storage/Cargo.toml
index b1710444a4..21ee756462 100644
--- a/crates/papyrus_storage/Cargo.toml
+++ b/crates/papyrus_storage/Cargo.toml
@@ -74,7 +74,5 @@ test-case.workspace = true
 test-log.workspace = true
 tokio = { workspace = true, features = ["full", "sync"] }

-[lints.rust]
-# See [here](https://github.com/taiki-e/cargo-llvm-cov/issues/370) for a discussion on why this is
-# needed (from rust 1.80).
-unexpected_cfgs = { level = "warn", check-cfg = ['cfg(coverage_nightly)'] }
+[lints]
+workspace = true
diff --git a/crates/papyrus_storage/src/bin/storage_benchmark.rs b/crates/papyrus_storage/src/bin/storage_benchmark.rs
index 61da955958..d8d164929e 100644
--- a/crates/papyrus_storage/src/bin/storage_benchmark.rs
+++ b/crates/papyrus_storage/src/bin/storage_benchmark.rs
@@ -103,7 +103,7 @@ impl Times {
         results.push(Entry {
             name: "get_class_hash_at".to_string(),
             unit: "Microseconds".to_string(),
-            value: get_class_hash_at_median as usize,
+            value: get_class_hash_at_median,
         });

         let get_nonce_at_median = if self.get_nonce_at.is_empty() {
@@ -114,7 +114,7 @@ impl Times {
         results.push(Entry {
             name: "get_nonce_at".to_string(),
             unit: "Microseconds".to_string(),
-            value: get_nonce_at_median as usize,
+            value: get_nonce_at_median,
         });

         let get_storage_at_median = if self.get_storage_at.is_empty() {
@@ -126,7 +126,7 @@
         results.push(Entry {
             name: "get_storage_at".to_string(),
             unit: "Microseconds".to_string(),
-            value: get_storage_at_median as usize,
+            value: get_storage_at_median,
         });

         results
@@ -155,7 +155,7 @@ impl Times {
 struct Entry {
     name: String,
     unit: String,
-    value: usize,
+    value: u128,
 }

 struct CliParams {
diff --git a/crates/papyrus_storage/src/db/db_stats.rs b/crates/papyrus_storage/src/db/db_stats.rs
index a5649b56e5..ef9462e345 100644
--- a/crates/papyrus_storage/src/db/db_stats.rs
+++ b/crates/papyrus_storage/src/db/db_stats.rs
@@ -59,6 +59,7 @@ impl DbReader {
             leaf_pages: stat.leaf_pages(),
             overflow_pages: stat.overflow_pages(),
             total_size: stat.total_size(),
+            #[allow(clippy::as_conversions)]
             db_portion: stat.total_size() as f64 / self.env.stat()?.total_size() as f64,
         })
     }
@@ -97,6 +98,7 @@ fn readable_bytes<S>(bytes_num: &u64, s: S) -> Result<S::Ok, S::Error>
 where
     S: serde::Serializer,
 {
+    #[allow(clippy::as_conversions)]
     s.serialize_str(&human_bytes(*bytes_num as f64))
 }

@@ -106,7 +108,7 @@ fn float_precision<S>(float: &f64, s: S) -> Result<S::Ok, S::Error>
 where
     S: serde::Serializer,
 {
-    const PRECISION: u32 = 4;
-    let power = u32::pow(10, PRECISION) as f64;
+    const PRECISION: i32 = 4;
+    let power = 10_f64.powi(PRECISION);
     s.serialize_f64((*float * power).round() / power)
 }
diff --git a/crates/papyrus_storage/src/mmap_file/mmap_file_test.rs b/crates/papyrus_storage/src/mmap_file/mmap_file_test.rs
index cab23312c3..95bb548bd8 100644
--- a/crates/papyrus_storage/src/mmap_file/mmap_file_test.rs
+++ b/crates/papyrus_storage/src/mmap_file/mmap_file_test.rs
@@ -151,32 +151,32 @@ fn grow_file() {
             open_file::<NoVersionValueWrapper<Vec<u8>>>(config.clone(), file_path.clone(), offset)
                 .unwrap();
         // file_size = 4 (growth_step), offset = 0
-        let mut file_size = file.metadata().unwrap().len();
-        assert_eq!(file_size, config.growth_step as u64);
+        let mut file_size = usize::try_from(file.metadata().unwrap().len()).unwrap();
+        assert_eq!(file_size, config.growth_step);
         assert_eq!(offset, 0);

         offset += writer.append(&data).len;
         // file_size = 8 (2 * growth_step), offset = 3 (serialization_size)
-        file_size = file.metadata().unwrap().len();
-        assert_eq!(file_size, 2 * config.growth_step as u64);
+        file_size = file.metadata().unwrap().len().try_into().unwrap();
+        assert_eq!(file_size, 2 * config.growth_step);
         assert_eq!(offset, serialization_size);

         offset += writer.append(&data).len;
         // file_size = 12 (3 * growth_step), offset = 6 (2 * serialization_size)
-        file_size = file.metadata().unwrap().len();
-        assert_eq!(file_size, 3 * config.growth_step as u64);
+        file_size = file.metadata().unwrap().len().try_into().unwrap();
+        assert_eq!(file_size, 3 * config.growth_step);
         assert_eq!(offset, 2 * serialization_size);

         offset += writer.append(&data).len;
         // file_size = 12 (3 * growth_step), offset = 9 (3 * serialization_size)
-        file_size = file.metadata().unwrap().len();
-        assert_eq!(file_size, 3 * config.growth_step as u64);
+        file_size = file.metadata().unwrap().len().try_into().unwrap();
+        assert_eq!(file_size, 3 * config.growth_step);
         assert_eq!(offset, 3 * serialization_size);

         offset += writer.append(&data).len;
         // file_size = 16 (4 * growth_step), offset = 12 (4 * serialization_size)
-        file_size = file.metadata().unwrap().len();
-        assert_eq!(file_size, 4 * config.growth_step as u64);
+        file_size = file.metadata().unwrap().len().try_into().unwrap();
+        assert_eq!(file_size, 4 * config.growth_step);
         assert_eq!(offset, 4 * serialization_size);
     }

@@ -187,9 +187,9 @@ fn grow_file() {
         .truncate(false)
         .open(file_path.clone())
         .unwrap();
-    assert_eq!(file.metadata().unwrap().len(), 4 * config.growth_step as u64);
+    assert_eq!(usize::try_from(file.metadata().unwrap().len()).unwrap(), 4 * config.growth_step);
     let _ = open_file::<NoVersionValueWrapper<Vec<u8>>>(config.clone(), file_path, offset).unwrap();
-    assert_eq!(file.metadata().unwrap().len(), 4 * config.growth_step as u64);
+    assert_eq!(usize::try_from(file.metadata().unwrap().len()).unwrap(), 4 * config.growth_step);

     dir.close().unwrap();
 }
@@ -240,7 +240,7 @@ async fn write_read_different_locations() {
         writer.append(&data);
         writer.flush();
         {
-            *lock.write().await = round as usize;
+            *lock.write().await = round.into();
         }
         barrier.wait().await;
         data = data.into_iter().map(|x| x + 2).collect();
diff --git a/crates/papyrus_storage/src/mmap_file/mod.rs b/crates/papyrus_storage/src/mmap_file/mod.rs
index 6928895122..2d61fa9901 100644
--- a/crates/papyrus_storage/src/mmap_file/mod.rs
+++ b/crates/papyrus_storage/src/mmap_file/mod.rs
@@ -150,8 +150,9 @@ impl MMapFile {
     fn grow(&mut self) {
         self.flush();
         let new_size = self.size + self.config.growth_step;
should fit in u64"); debug!("Growing file to size: {}", new_size); - self.file.set_len(new_size as u64).expect("Failed to set the file size"); + self.file.set_len(new_size_u64).expect("Failed to set the file size"); self.size = new_size; } diff --git a/crates/papyrus_storage/src/serialization/serializers.rs b/crates/papyrus_storage/src/serialization/serializers.rs index 25f0befef3..a3e4306bb3 100644 --- a/crates/papyrus_storage/src/serialization/serializers.rs +++ b/crates/papyrus_storage/src/serialization/serializers.rs @@ -594,6 +594,7 @@ macro_rules! auto_storage_serde { ($(pub)? enum $name:ident { $($variant:ident $( ($ty:ty) )? = $num:expr ,)* } $($rest:tt)*) => { impl StorageSerde for $name { fn serialize_into(&self, res: &mut impl std::io::Write) -> Result<(), StorageSerdeError> { + #[allow(clippy::as_conversions)] match self { $( variant!( value, $variant $( ($ty) )?) => { @@ -765,7 +766,7 @@ impl StorageSerde for u8 { // TODO(dan): get rid of usize. impl StorageSerde for usize { fn serialize_into(&self, res: &mut impl std::io::Write) -> Result<(), StorageSerdeError> { - (*self as u64).serialize_into(res) + (u64::try_from(*self).expect("usize should fit in u64")).serialize_into(res) } fn deserialize_from(bytes: &mut impl std::io::Read) -> Option { @@ -930,7 +931,7 @@ impl StorageSerde for BlockNumber { } fn deserialize_from(bytes: &mut impl std::io::Read) -> Option { - Some(BlockNumber(u32::deserialize_from(bytes)? as u64)) + Some(BlockNumber(u32::deserialize_from(bytes)?.into())) } } diff --git a/crates/papyrus_storage/src/state/state_test.rs b/crates/papyrus_storage/src/state/state_test.rs index 142ce009df..d5f2fdeb78 100644 --- a/crates/papyrus_storage/src/state/state_test.rs +++ b/crates/papyrus_storage/src/state/state_test.rs @@ -563,7 +563,7 @@ fn get_nonce_key_serialization() { deprecated_declared_classes: Vec::new(), nonces: IndexMap::from([( contract_address, - Nonce(StarkHash::from(block_number as u128 + 1)), + Nonce(StarkHash::from(u128::from(block_number) + 1)), )]), replaced_classes: IndexMap::new(), }; @@ -598,7 +598,7 @@ fn get_nonce_key_serialization() { println!("{nonce:?}"); let nonce = nonce.unwrap(); - assert_eq!(nonce, Nonce(StarkHash::from(block_number as u128))); + assert_eq!(nonce, Nonce(StarkHash::from(u128::from(block_number)))); } } diff --git a/crates/papyrus_storage/src/utils.rs b/crates/papyrus_storage/src/utils.rs index 01a4ef5d77..508756cc97 100644 --- a/crates/papyrus_storage/src/utils.rs +++ b/crates/papyrus_storage/src/utils.rs @@ -93,11 +93,18 @@ fn dump_declared_classes_table_by_block_range_internal( // TODO(dvir): relocate all the storage metrics in one module and export them (also in other // crates). /// Updates storage metrics about the state of the storage. +#[allow(clippy::as_conversions)] pub fn update_storage_metrics(reader: &StorageReader) -> StorageResult<()> { debug!("updating storage metrics"); gauge!("storage_free_pages_number", reader.db_reader.get_free_pages()? 
     gauge!("storage_free_pages_number", reader.db_reader.get_free_pages()? as f64);
     let info = reader.db_reader.get_db_info()?;
-    absolute_counter!("storage_last_page_number", info.last_pgno() as u64);
-    absolute_counter!("storage_last_transaction_index", info.last_txnid() as u64);
+    absolute_counter!(
+        "storage_last_page_number",
+        u64::try_from(info.last_pgno()).expect("usize should fit in u64")
+    );
+    absolute_counter!(
+        "storage_last_transaction_index",
+        u64::try_from(info.last_txnid()).expect("usize should fit in u64")
+    );
     Ok(())
 }
diff --git a/crates/papyrus_storage/src/utils_test.rs b/crates/papyrus_storage/src/utils_test.rs
index 9d201440c3..92ef71f12e 100644
--- a/crates/papyrus_storage/src/utils_test.rs
+++ b/crates/papyrus_storage/src/utils_test.rs
@@ -27,7 +27,7 @@ fn test_dump_declared_classes() {
     let mut state_diffs = vec![];
     let ((reader, mut writer), _temp_dir) = get_test_storage();
     for i in 0..5 {
-        let i_felt = Felt::from(i as u128);
+        let i_felt = Felt::from(u128::try_from(i).expect("usize should fit in u128"));
         declared_classes.push((
             ClassHash(i_felt),
             ContractClass {
@@ -46,7 +46,7 @@
             nonces: indexmap!(),
             replaced_classes: indexmap!(),
         });
-        let block_number = BlockNumber(i as u64);
+        let block_number = BlockNumber(u64::try_from(i).expect("usize should fit in u64"));
         let txn = writer.begin_rw_txn().unwrap();
         txn.append_state_diff(block_number, state_diffs[i].clone())
             .unwrap()
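
Note: below is a minimal sketch of the conversion patterns this patch
standardizes on, for readers unfamiliar with clippy's `as_conversions`
lint. The `[workspace.lints.clippy]` entry is an assumption about the
workspace-root Cargo.toml that `[lints] workspace = true` pulls in (the
real table is outside this diff), and `widen`, `to_u64` and `ratio` are
hypothetical helpers, not code from the repo:

    // Assumed workspace-root Cargo.toml entry behind `[lints] workspace = true`:
    //
    //     [workspace.lints.clippy]
    //     as_conversions = "deny"

    /// Lossless widening: prefer `From`/`Into` whenever an impl exists.
    fn widen(b: u32) -> u64 {
        u64::from(b)
    }

    /// usize -> u64 has no `From` impl (usize is platform-sized), so the
    /// patch uses `TryFrom` plus an `expect` that states the assumption.
    fn to_u64(n: usize) -> u64 {
        u64::try_from(n).expect("usize should fit in u64")
    }

    /// Integer-to-float casts have no lossless alternative, so exceptions
    /// are scoped as narrowly as possible with an explicit `allow`.
    #[allow(clippy::as_conversions)]
    fn ratio(part: u64, total: u64) -> f64 {
        part as f64 / total as f64
    }

    fn main() {
        assert_eq!(widen(7), 7);
        assert_eq!(to_u64(3), 3);
        assert!((ratio(1, 4) - 0.25).abs() < f64::EPSILON);
    }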