extend multihash_codetable::Code
hanabi1224 committed Nov 28, 2024
1 parent 926a580 commit b38c583
Showing 17 changed files with 85 additions and 44 deletions.
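The crate-local enum is renamed from MultihashCodeLegacy to MultihashCode and extended with the standard multihash codes, so the prelude no longer re-exports multihash_codetable::Code under that name. A minimal call-site sketch, as a hypothetical test using only names that appear in this diff:

    use crate::utils::multihash::prelude::*;

    #[test]
    fn rename_sketch() {
        // Before this commit a call site would read MultihashCodeLegacy::Identity.digest(&[]).
        let identity = MultihashCode::Identity.digest(&[]);
        // New in this commit: non-identity codes live on the same enum.
        let sha2 = MultihashCode::Sha2_256.digest(b"data");
        assert_ne!(identity.code(), sha2.code());
    }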
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -15,7 +15,7 @@ flume = "0.11"
futures = "0.3"
libp2p = { version = "0.54", default-features = false }
libp2p-swarm-test = "0.4"
multihash-codetable = "0.1"
multihash-codetable = { version = "0.1", features = ["blake2b", "blake2s", "blake3", "sha2", "sha3", "strobe"] }
rust2go = "0.3"
tokio = "1"

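Each feature added to multihash-codetable above enables a hasher type that the derive in src/utils/multihash.rs refers to; for example, the "sha2" feature is what provides multihash_codetable::Sha2_256 (this feature-to-hasher mapping is an assumption based on the crate's feature names). A minimal sketch of a digest that relies on it:

    use crate::utils::multihash::prelude::*;

    #[test]
    fn sha2_feature_sketch() {
        // Sha2_256 delegates to multihash_codetable::Sha2_256, assumed gated behind "sha2".
        let mh = MultihashCode::Sha2_256.digest(b"hello");
        assert_eq!(mh.code(), 0x12); // code declared on the variant
        assert_eq!(mh.size(), 32); // 32-byte hash size
    }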
6 changes: 3 additions & 3 deletions src/blocks/tipset.rs
@@ -767,12 +767,12 @@ mod test {
fn ensure_state_roots_are_equal() {
let h0 = RawBlockHeader {
miner_address: Address::new_id(0),
-state_root: Cid::new_v1(DAG_CBOR, MultihashCodeLegacy::Identity.digest(&[])),
+state_root: Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[])),
..Default::default()
};
let h1 = RawBlockHeader {
miner_address: Address::new_id(1),
-state_root: Cid::new_v1(DAG_CBOR, MultihashCodeLegacy::Identity.digest(&[1])),
+state_root: Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[1])),
..Default::default()
};
assert_eq!(
@@ -791,7 +791,7 @@
miner_address: Address::new_id(1),
parents: TipsetKey::from(nonempty![Cid::new_v1(
DAG_CBOR,
-MultihashCodeLegacy::Identity.digest(&[])
+MultihashCode::Identity.digest(&[])
)]),
..Default::default()
};
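For context on the fixtures above: the identity code does not hash its input, it embeds it, so distinct inputs always produce distinct CIDs and the values are deterministic across runs. A minimal sketch, with imports assumed as in this test module:

    use crate::utils::multihash::prelude::*;
    use cid::Cid;
    use fvm_ipld_encoding::DAG_CBOR;

    #[test]
    fn identity_cids_are_distinct_and_deterministic() {
        let empty = Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[]));
        let one = Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[1]));
        assert_ne!(empty, one);
        assert_eq!(empty, Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[])));
    }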
6 changes: 3 additions & 3 deletions src/chain/store/chain_store.rs
@@ -689,11 +689,11 @@ mod tests {

let gen_block = CachingBlockHeader::new(RawBlockHeader {
miner_address: Address::new_id(0),
-state_root: Cid::new_v1(DAG_CBOR, MultihashCodeLegacy::Identity.digest(&[])),
+state_root: Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[])),
epoch: 1,
weight: 2u32.into(),
-messages: Cid::new_v1(DAG_CBOR, MultihashCodeLegacy::Identity.digest(&[])),
-message_receipts: Cid::new_v1(DAG_CBOR, MultihashCodeLegacy::Identity.digest(&[])),
+messages: Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[])),
+message_receipts: Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[])),
..Default::default()
});
let cs =
4 changes: 2 additions & 2 deletions src/db/car/forest.rs
@@ -503,8 +503,8 @@ mod tests {
use crate::utils::multihash::prelude::*;

// Distinct CIDs may map to the same hash value
-let cid_a = Cid::new_v1(0, MultihashCodeLegacy::Identity.digest(&[10]));
-let cid_b = Cid::new_v1(0, MultihashCodeLegacy::Identity.digest(&[0]));
+let cid_a = Cid::new_v1(0, MultihashCode::Identity.digest(&[10]));
+let cid_b = Cid::new_v1(0, MultihashCode::Identity.digest(&[0]));
// A and B are _not_ the same...
assert_ne!(cid_a, cid_b);
// ... but they map to the same hash:
9 changes: 3 additions & 6 deletions src/db/car/forest/index/hash.rs
@@ -123,18 +123,15 @@ mod tests {
Cid::from_cbor_blake2b256(&"haskell").unwrap(),
10392497608425502268,
),
+(Cid::new_v1(0xAB, MultihashCode::Identity.digest(&[])), 170),
-(
-Cid::new_v1(0xAB, MultihashCodeLegacy::Identity.digest(&[])),
-170,
-),
(
-Cid::new_v1(0xAC, MultihashCodeLegacy::Identity.digest(&[1, 2, 3, 4])),
+Cid::new_v1(0xAC, MultihashCode::Identity.digest(&[1, 2, 3, 4])),
171,
),
(
Cid::new_v1(
0xAD,
-MultihashCodeLegacy::Identity.digest(&[1, 2, 3, 4, 5, 6, 7, 8]),
+MultihashCode::Identity.digest(&[1, 2, 3, 4, 5, 6, 7, 8]),
),
578437695752307371,
),
6 changes: 2 additions & 4 deletions src/db/gc/mod.rs
@@ -490,10 +490,8 @@ mod test {
let current_epoch = current_epoch as ChainEpochDelta;

let persistent_data = [1, 55];
-let persistent_cid = Cid::new_v1(
-DAG_CBOR,
-MultihashCodeLegacy::Identity.digest(&persistent_data),
-);
+let persistent_cid =
+Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&persistent_data));

// Make sure we run enough epochs to initiate GC.
tester.run_epochs(current_epoch);
2 changes: 1 addition & 1 deletion src/ipld/tests/cbor_test.rs
@@ -55,7 +55,7 @@ fn cid_conversions_ipld() {
assert_eq!(to_ipld(cid).unwrap(), Ipld::Link(cid));

// Test with identity hash (different length prefix for cbor)
-let cid = Cid::new_v1(DAG_CBOR, MultihashCodeLegacy::Identity.digest(&[1, 2]));
+let cid = Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[1, 2]));
let ipld = ipld!(Ipld::Link(cid));
let ipld2 = to_ipld(cid).unwrap();
assert_eq!(ipld, ipld2);
2 changes: 1 addition & 1 deletion src/ipld/util.rs
@@ -28,7 +28,7 @@ fn should_save_block_to_snapshot(cid: Cid) -> bool {
// Don't include identity CIDs.
// We only include raw and dagcbor, for now.
// Raw for "code" CIDs.
-if cid.hash().code() == u64::from(MultihashCodeLegacy::Identity) {
+if cid.hash().code() == u64::from(MultihashCode::Identity) {
false
} else {
matches!(
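The reasoning behind the filter above: an identity CID carries its payload inline in the multihash, so there is nothing to fetch or store for it, and only raw and DAG-CBOR blocks are wanted in a snapshot. A hedged sketch of the identity check with the extended enum (the exact matches! arm in this file is truncated above and is not reproduced here):

    use crate::utils::multihash::prelude::*;
    use cid::Cid;

    /// Sketch: true when the CID uses the identity code 0x0.
    fn is_identity_cid(cid: &Cid) -> bool {
        cid.hash().code() == u64::from(MultihashCode::Identity)
    }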
2 changes: 1 addition & 1 deletion src/libp2p/hello/message.rs
@@ -35,7 +35,7 @@ mod tests {
#[test]
fn hello_default_ser() {
let orig_msg = HelloRequest {
-genesis_cid: Cid::new_v1(DAG_CBOR, MultihashCodeLegacy::Identity.digest(&[])),
+genesis_cid: Cid::new_v1(DAG_CBOR, MultihashCode::Identity.digest(&[])),
heaviest_tipset_weight: Default::default(),
heaviest_tipset_height: Default::default(),
heaviest_tip_set: NonEmpty::of(Default::default()),
6 changes: 1 addition & 5 deletions src/libp2p_bitswap/internals/prefix.rs
@@ -56,11 +56,7 @@ impl Prefix {

/// Create a CID out of the prefix and some data that will be hashed
pub fn to_cid(&self, data: &[u8]) -> anyhow::Result<Cid> {
-let mh = if self.mh_type == 0 {
-MultihashCodeLegacy::try_from(self.mh_type)?.digest(data)
-} else {
-MultihashCode::try_from(self.mh_type)?.digest(data)
-};
+let mh = MultihashCode::try_from(self.mh_type)?.digest(data);
Ok(Cid::new(self.version, self.codec, mh)?)
}
}
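The special case for mh_type == 0 could be dropped because the extended MultihashCode resolves the identity code itself, so a single try_from now covers every supported multihash type. A minimal sketch of that property:

    use crate::utils::multihash::prelude::*;

    #[test]
    fn try_from_covers_identity_and_hashed_codes() {
        // 0x0 used to be special-cased; it now resolves like any other code.
        assert_eq!(MultihashCode::try_from(0x0).unwrap(), MultihashCode::Identity);
        // Non-identity codes resolve and digest as before, e.g. sha2-256.
        let sha2 = MultihashCode::try_from(0x12).unwrap();
        assert_eq!(sha2.digest(b"abc").code(), 0x12);
    }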
2 changes: 1 addition & 1 deletion src/shim/actors/version.rs
@@ -24,7 +24,7 @@ static [<$actor:upper _ACTOR_CIDS>]: Lazy<Vec<(u64, Cid)>> = Lazy::new(|| {

// We need to manually add init actors for V0.
if Type::$actor_type == Type::Init {
-let init = Cid::new_v1(fvm_ipld_encoding::IPLD_RAW, MultihashCodeLegacy::Identity.digest(b"fil/1/init"));
+let init = Cid::new_v1(fvm_ipld_encoding::IPLD_RAW, MultihashCode::Identity.digest(b"fil/1/init"));
actors.push((0, init));
}
actors
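For context on the manually added V0 init actor: with the identity code the multihash digest is literally the input bytes, so this CID simply embeds the string fil/1/init. A minimal sketch, written as a hypothetical standalone test (the real code above runs inside a macro):

    use crate::utils::multihash::prelude::*;
    use cid::Cid;

    #[test]
    fn v0_init_actor_cid_embeds_its_name() {
        let init = Cid::new_v1(
            fvm_ipld_encoding::IPLD_RAW,
            MultihashCode::Identity.digest(b"fil/1/init"),
        );
        assert_eq!(init.hash().code(), 0x0); // identity code
        assert_eq!(init.hash().digest(), b"fil/1/init".as_slice());
    }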
2 changes: 1 addition & 1 deletion src/state_migration/nv17/miner.rs
@@ -900,7 +900,7 @@ mod tests {
"verifiedregistry",
"datacap",
] {
-let hash = MultihashCodeLegacy::Identity.digest(format!("{prefix}{name}").as_bytes());
+let hash = MultihashCode::Identity.digest(format!("{prefix}{name}").as_bytes());
let code_cid = Cid::new_v1(IPLD_RAW, hash);
manifest_data.push((name, code_cid));
}
2 changes: 1 addition & 1 deletion src/state_migration/nv21/miner.rs
@@ -453,7 +453,7 @@ mod tests {
"verifiedregistry",
"datacap",
] {
-let hash = MultihashCodeLegacy::Identity.digest(format!("{prefix}{name}").as_bytes());
+let hash = MultihashCode::Identity.digest(format!("{prefix}{name}").as_bytes());
let code_cid = Cid::new_v1(IPLD_RAW, hash);
manifest_data.push((name, code_cid));
}
2 changes: 1 addition & 1 deletion src/tool/subcommands/car_cmd.rs
@@ -180,7 +180,7 @@ mod tests {
}

fn invalid_block(msg: &str) -> CarBlock {
-let cid = Cid::new_v1(0, MultihashCodeLegacy::Identity.digest(&[]));
+let cid = Cid::new_v1(0, MultihashCode::Identity.digest(&[]));
let data = msg.as_bytes().to_vec();
CarBlock { cid, data }
}
12 changes: 3 additions & 9 deletions src/utils/db/car_stream.rs
@@ -67,15 +67,9 @@ impl CarBlock {
}

pub fn validate(&self) -> anyhow::Result<()> {
-let actual = match self.cid.hash().code() {
-0 => {
-let code = MultihashCodeLegacy::try_from(0)?;
-Cid::new_v1(self.cid.codec(), code.digest(&self.data))
-}
-hash_code => {
-let code = MultihashCode::try_from(hash_code)?;
-Cid::new_v1(self.cid.codec(), code.digest(&self.data))
-}
+let actual = {
+let code = MultihashCode::try_from(self.cid.hash().code())?;
+Cid::new_v1(self.cid.codec(), code.digest(&self.data))
};
anyhow::ensure!(
actual == self.cid,
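With the extended enum, validate re-digests the block data using whatever code the CID declares and compares the result, so identity and hashed CIDs go through the same path. A minimal usage sketch (the CarBlock import path is assumed from this file's location; IPLD_RAW as used elsewhere in this diff):

    use crate::utils::db::car_stream::CarBlock; // assumed path
    use crate::utils::multihash::prelude::*;
    use cid::Cid;
    use fvm_ipld_encoding::IPLD_RAW;

    #[test]
    fn car_block_validate_sketch() {
        let data = b"block payload".to_vec();
        // Any supported code works, not just identity, e.g. blake2b-256.
        let cid = Cid::new_v1(IPLD_RAW, MultihashCode::Blake2b256.digest(&data));
        let block = CarBlock { cid, data };
        assert!(block.validate().is_ok());
    }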
3 changes: 2 additions & 1 deletion src/utils/db/mod.rs
@@ -4,12 +4,13 @@
pub mod car_stream;
pub mod car_util;

+use crate::utils::multihash::prelude::*;
use anyhow::Context as _;
use cid::Cid;
use fvm_ipld_blockstore::Blockstore;
use fvm_ipld_encoding::CborStore;
use fvm_ipld_encoding::{to_vec, DAG_CBOR};
-use multihash_codetable::{Code, MultihashDigest as _};
+use multihash_codetable::Code;

use serde::ser::Serialize;

61 changes: 58 additions & 3 deletions src/utils/multihash.rs
@@ -8,17 +8,72 @@
//!

pub mod prelude {
-pub use super::MultihashCodeLegacy;
-pub use multihash_codetable::{Code as MultihashCode, MultihashDigest as _};
+pub use super::MultihashCode;
+pub use multihash_codetable::MultihashDigest as _;
}

use multihash_derive::MultihashDigest;

/// Extends [`multihash_codetable::Code`] with `Identity`
#[derive(Clone, Copy, Debug, Eq, MultihashDigest, PartialEq)]
#[mh(alloc_size = 64)]
-pub enum MultihashCodeLegacy {
+pub enum MultihashCode {
#[mh(code = 0x0, hasher = IdentityHasher::<64>)]
Identity,
+/// SHA-256 (32-byte hash size)
+#[mh(code = 0x12, hasher = multihash_codetable::Sha2_256)]
+Sha2_256,
+/// SHA-512 (64-byte hash size)
+#[mh(code = 0x13, hasher = multihash_codetable::Sha2_512)]
+Sha2_512,
+/// SHA3-224 (28-byte hash size)
+#[mh(code = 0x17, hasher = multihash_codetable::Sha3_224)]
+Sha3_224,
+/// SHA3-256 (32-byte hash size)
+#[mh(code = 0x16, hasher = multihash_codetable::Sha3_256)]
+Sha3_256,
+/// SHA3-384 (48-byte hash size)
+#[mh(code = 0x15, hasher = multihash_codetable::Sha3_384)]
+Sha3_384,
+/// SHA3-512 (64-byte hash size)
+#[mh(code = 0x14, hasher = multihash_codetable::Sha3_512)]
+Sha3_512,
+/// Keccak-224 (28-byte hash size)
+#[mh(code = 0x1a, hasher = multihash_codetable::Keccak224)]
+Keccak224,
+/// Keccak-256 (32-byte hash size)
+#[mh(code = 0x1b, hasher = multihash_codetable::Keccak256)]
+Keccak256,
+/// Keccak-384 (48-byte hash size)
+#[mh(code = 0x1c, hasher = multihash_codetable::Keccak384)]
+Keccak384,
+/// Keccak-512 (64-byte hash size)
+#[mh(code = 0x1d, hasher = multihash_codetable::Keccak512)]
+Keccak512,
+/// BLAKE2b-256 (32-byte hash size)
+#[mh(code = 0xb220, hasher = multihash_codetable::Blake2b256)]
+Blake2b256,
+/// BLAKE2b-512 (64-byte hash size)
+#[mh(code = 0xb240, hasher = multihash_codetable::Blake2b512)]
+Blake2b512,
+/// BLAKE2s-128 (16-byte hash size)
+#[mh(code = 0xb250, hasher = multihash_codetable::Blake2s128)]
+Blake2s128,
+/// BLAKE2s-256 (32-byte hash size)
+#[mh(code = 0xb260, hasher = multihash_codetable::Blake2s256)]
+Blake2s256,
+/// BLAKE3-256 (32-byte hash size)
+#[mh(code = 0x1e, hasher = multihash_codetable::Blake3_256)]
+Blake3_256,
+/// RIPEMD-160 (20-byte hash size)
+#[mh(code = 0x1053, hasher = multihash_codetable::Ripemd160)]
+Ripemd160,
+/// RIPEMD-256 (32-byte hash size)
+#[mh(code = 0x1054, hasher = multihash_codetable::Ripemd256)]
+Ripemd256,
+/// RIPEMD-320 (40-byte hash size)
+#[mh(code = 0x1055, hasher = multihash_codetable::Ripemd320)]
+Ripemd320,
}

/// Identity hasher with a maximum size.
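A minimal sketch of the extended enum in use, assuming the hasher features enabled in Cargo.toml above: digest with a non-identity code, inspect the result, and round-trip the code through the derived conversions:

    use crate::utils::multihash::prelude::*;
    use cid::Cid;

    #[test]
    fn extended_multihash_code_sketch() {
        // blake2b-256: code 0xb220, 32-byte digest, as declared on the variant above.
        let mh = MultihashCode::Blake2b256.digest(b"forest");
        assert_eq!(mh.code(), 0xb220);
        assert_eq!(mh.size(), 32);

        // Codes round-trip through the derived TryFrom<u64> / From impls, identity included.
        assert_eq!(MultihashCode::try_from(mh.code()).unwrap(), MultihashCode::Blake2b256);
        assert_eq!(u64::from(MultihashCode::Identity), 0x0);

        // CIDs are built exactly as before the rename.
        let cid = Cid::new_v1(fvm_ipld_encoding::DAG_CBOR, mh);
        assert_eq!(cid.hash().code(), 0xb220);
    }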
