chore: replace libipld with ipld-core (#5013)
hanabi1224 authored Nov 28, 2024
1 parent 82c1e2a commit 0fc577e
Showing 20 changed files with 147 additions and 109 deletions.
147 changes: 81 additions & 66 deletions Cargo.lock

Large diffs are not rendered by default.

7 changes: 3 additions & 4 deletions Cargo.toml
@@ -44,6 +44,7 @@ cfg-if = "1"
cfg-vis = "0.3"
chrono = { version = "0.4", default-features = false, features = ["clock"] }
cid = { workspace = true }
cid_0_11 = { package = "cid", version = "0.11", default-features = false, features = ["std"] }
clap = { version = "4", features = ["derive"] }
colored = "2.0"
crypto_secretbox = "0.1"
@@ -93,16 +94,14 @@ humantime = "2"
indexmap = { version = "2", features = ["serde"] }
indicatif = { version = "0.17", features = ["tokio"] }
integer-encoding = "4.0"
ipld-core = { version = "0.4", features = ["serde", "arb"] }
is-terminal = "0.4"
itertools = "0.13"
jsonrpsee = { version = "0.24", features = ["server", "ws-client", "http-client"] }
jsonwebtoken = "9"
keccak-hash = "0.11"
kubert-prometheus-process = "0.1"
lazy-regex = "3"
libipld = { version = "0.16", default-features = false, features = ["dag-cbor", "dag-json", "derive", "serde-codec"] }
libipld-core = { version = "0.16", features = ['arb', 'serde-codec'] }
libipld-macro = "0.16"
libp2p = { workspace = true, features = [
'autonat',
'gossipsub',
@@ -171,7 +170,7 @@ schemars = { version = "0.8", features = ["chrono", "uuid1"] }
scopeguard = "1"
semver = "1"
serde = { version = "1", default-features = false, features = ["derive"] }
serde_ipld_dagcbor = "0.4"
serde_ipld_dagcbor = "0.6"
serde_json = { version = "1.0", features = ["raw_value"] }
serde_tuple = "1"
serde_with = { version = "3", features = ["chrono_0_4"] }
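
Below is a minimal sketch of how this dependency swap plays out at call sites. It is not part of the commit: the `use` path and the `ipld!`/`to_ipld` names are taken from the hunks further down, while the example function itself is invented; `cid_0_11` is the renamed `cid` 0.11 dependency added above.

// Before this commit, these items came from three separate crates:
//     use libipld::{ipld, Ipld};
//     use libipld_core::serde::to_ipld;
//     use libipld_macro::ipld;
// After it, everything comes from the single `ipld-core` crate:
use ipld_core::{ipld, ipld::Ipld, serde::to_ipld};

fn example(link: cid_0_11::Cid) -> (Ipld, Ipld) {
    // `ipld!` (formerly `libipld_macro::ipld!`) builds `Ipld` values in place.
    let as_link = ipld!(Ipld::Link(link));
    // `to_ipld` (formerly `libipld_core::serde::to_ipld`) converts any `serde::Serialize` value.
    let as_text = to_ipld("hello").expect("serializing a string literal should not fail");
    (as_link, as_text)
}
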
1 change: 1 addition & 0 deletions interop-tests/build.rs
@@ -2,6 +2,7 @@
// SPDX-License-Identifier: Apache-2.0, MIT

fn main() {
println!("cargo::rerun-if-changed=src/tests/go_app");
rust2go::Builder::default()
.with_go_src("./src/tests/go_app")
.with_regen_arg(rust2go::RegenArgs {
4 changes: 2 additions & 2 deletions src/documentation.rs
@@ -111,7 +111,7 @@
/// which is a representation of all Filecoin state at a point in time.
/// For each actor, the `StateTree` holds the CID for its state: [`ActorState.state`](fvm4::state_tree::ActorState::state).
///
/// Actor state is serialized and stored as [`Ipld`](libipld::Ipld).
/// Actor state is serialized and stored as [`Ipld`](ipld_core::ipld::Ipld).
/// Think of this as "JSON with links ([`Cid`](cid::Cid)s)".
/// So the `cron` actor's state mentioned above will be ultimately serialized into `Ipld`
/// and stored in the `StateStore`, per
@@ -169,7 +169,7 @@
///
/// With respect to a particular IPLD [`Blockstore`](fvm_ipld_blockstore::Blockstore):
/// - An item such a list is _fully inhabited_ if all its recursive
/// [`Ipld::Link`](libipld::Ipld::Link)s exist in the blockstore.
/// [`Ipld::Link`](ipld_core::ipld::Ipld::Link)s exist in the blockstore.
/// - Otherwise, an item is only _partially inhabited_.
/// The links are said to be "dead links".
///
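
To make the "JSON with links" description above concrete, here is a small illustrative sketch (not part of the commit; the field names are invented, and `cid_0_11` is the renamed `cid` 0.11 dependency whose `Cid` type `ipld-core` links hold):

use ipld_core::ipld::Ipld;
use std::collections::BTreeMap;

// A toy actor-state-like value: ordinary data plus a `Cid` link to another IPLD block.
fn toy_state(head: cid_0_11::Cid) -> Ipld {
    let mut fields = BTreeMap::new();
    fields.insert("balance".to_string(), Ipld::Integer(42));
    fields.insert("head".to_string(), Ipld::Link(head));
    Ipld::Map(fields)
}
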
2 changes: 1 addition & 1 deletion src/ipld/mod.rs
@@ -4,7 +4,7 @@
pub mod selector;
pub mod util;

pub use libipld_core::ipld::Ipld;
pub use ipld_core::ipld::Ipld;
pub use util::*;

#[cfg(test)]
10 changes: 6 additions & 4 deletions src/ipld/tests/cbor_test.rs
@@ -1,7 +1,7 @@
// Copyright 2019-2024 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use crate::{ipld::Ipld, utils::encoding::from_slice_with_fallback};
use crate::utils::encoding::from_slice_with_fallback;
use cid::{
multihash::{
Code::{Blake2b256, Identity},
@@ -10,19 +10,19 @@ use cid::{
Cid,
};
use fvm_ipld_encoding::{to_vec, DAG_CBOR};
use libipld_core::serde::to_ipld;
use libipld_macro::ipld;
use ipld_core::{ipld, ipld::Ipld, serde::to_ipld};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Clone)]
struct TestStruct {
name: String,
details: Cid,
details: cid_0_11::Cid,
}

#[test]
fn encode_new_type() {
let details = Cid::new_v1(DAG_CBOR, Blake2b256.digest(&[1, 2, 3]));
let details = crate::utils::cid::cid_10_to_11(&details);
let name = "Test".to_string();
let t_struct = TestStruct {
name: name.clone(),
@@ -46,6 +46,7 @@ fn encode_new_type() {
#[test]
fn cid_conversions_ipld() {
let cid = Cid::new_v1(DAG_CBOR, Blake2b256.digest(&[1, 2, 3]));
let cid = crate::utils::cid::cid_10_to_11(&cid);
let m_s = TestStruct {
name: "s".to_owned(),
details: cid,
@@ -63,6 +64,7 @@ fn cid_conversions_ipld() {

// Test with identity hash (different length prefix for cbor)
let cid = Cid::new_v1(DAG_CBOR, Identity.digest(&[1, 2]));
let cid = crate::utils::cid::cid_10_to_11(&cid);
let ipld = ipld!(Ipld::Link(cid));
let ipld2 = to_ipld(cid).unwrap();
assert_eq!(ipld, ipld2);
4 changes: 2 additions & 2 deletions src/ipld/util.rs
@@ -81,7 +81,7 @@ impl DfsIter {

impl From<Cid> for DfsIter {
fn from(cid: Cid) -> Self {
DfsIter::new(Ipld::Link(cid))
DfsIter::new(Ipld::Link(crate::utils::cid::cid_10_to_11(&cid)))
}
}

@@ -181,7 +181,7 @@ impl<DB: Blockstore, T: Iterator<Item = Tipset> + Unpin> Stream for ChainStream<

let ipld_to_cid = |ipld| {
if let Ipld::Link(cid) = ipld {
return Some(cid);
return Some(crate::utils::cid::cid_11_to_10(&cid));
}
None
};
6 changes: 3 additions & 3 deletions src/lotus_json/ipld.rs
@@ -19,15 +19,15 @@
//! compatible, we cannot switch to `libipld-json`.
//!
//! # Tech debt
//! - The real way to do this is to implement [`libipld::codec`] bits appropriately,
//! - The real way to do this is to implement [`ipld_core::codec::Codec`] bits appropriately,
//! or embrace using our own struct.
use std::{collections::BTreeMap, fmt};

use super::*;

use ::cid::multibase;
use libipld::{ipld, Ipld};
use ipld_core::{ipld, ipld::Ipld};
use serde::de;

#[derive(Serialize, Deserialize, JsonSchema)]
@@ -313,7 +313,7 @@ quickcheck::quickcheck! {
/// Error: "called `Result::unwrap()` on an `Err` value: Error(\"Input too short\", line: 1, column: 52)"',
/// ```
/// The actual error message is a little ambiguous with regards to the cause
/// because [`libipld` has a custom debug implementation][unhelpful]
/// because [`ipld_core`] has a custom debug implementation [unhelpful]
///
/// Here's what the minimal test case (or simply another bug) is after trying to understand the above.
///
4 changes: 2 additions & 2 deletions src/rpc/methods/chain.rs
@@ -28,9 +28,9 @@ use cid::Cid;
use fvm_ipld_blockstore::Blockstore;
use fvm_ipld_encoding::{CborStore, RawBytes};
use hex::ToHex;
use ipld_core::ipld::Ipld;
use jsonrpsee::types::error::ErrorObjectOwned;
use jsonrpsee::types::Params;
use libipld::Ipld;
use num::BigInt;
use once_cell::sync::Lazy;
use schemars::JsonSchema;
@@ -319,7 +319,7 @@ impl RpcMethod<2> for ChainStatObj {
{
for ipld in DfsIter::new(ipld) {
if let Ipld::Link(cid) = ipld {
queue.push_back(cid);
queue.push_back(crate::utils::cid::cid_11_to_10(&cid));
}
}
}
2 changes: 1 addition & 1 deletion src/rpc/methods/eth.rs
@@ -47,8 +47,8 @@ use cbor4ii::core::Value;
use cid::Cid;
use fvm_ipld_blockstore::Blockstore;
use fvm_ipld_encoding::{RawBytes, CBOR, DAG_CBOR, IPLD_RAW};
use ipld_core::ipld::Ipld;
use itertools::Itertools;
use libipld_core::ipld::Ipld;
use num::{BigInt, Zero as _};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
2 changes: 1 addition & 1 deletion src/rpc/methods/eth/types.rs
@@ -3,7 +3,7 @@

use super::*;
use anyhow::ensure;
use libipld::error::SerdeError;
use ipld_core::serde::SerdeError;
use libsecp256k1::util::FULL_PUBLIC_KEY_SIZE;
use serde::de::{value::StringDeserializer, IntoDeserializer};
use std::hash::Hash;
17 changes: 13 additions & 4 deletions src/rpc/methods/state.rs
@@ -62,8 +62,8 @@ use futures::StreamExt;
use fvm_ipld_blockstore::Blockstore;
use fvm_ipld_encoding::{CborStore, DAG_CBOR};
pub use fvm_shared3::sector::StoragePower;
use ipld_core::ipld::Ipld;
use jsonrpsee::types::error::ErrorObject;
use libipld_core::ipld::Ipld;
use num_bigint::BigInt;
use num_traits::Euclid;
use nunny::{vec as nonempty, Vec as NonEmpty};
@@ -1219,7 +1219,14 @@ impl RpcMethod<2> for StateFetchRoot {

// When walking an Ipld graph, we're only interested in the DAG_CBOR encoded nodes.
let mut get_ipld_link = |ipld: &Ipld| match ipld {
&Ipld::Link(cid) if cid.codec() == DAG_CBOR && seen.insert(cid) => Some(cid),
&Ipld::Link(cid) if cid.codec() == DAG_CBOR => {
let cid = crate::utils::cid::cid_11_to_10(&cid);
if seen.insert(cid) {
Some(cid)
} else {
None
}
}
_ => None,
};

@@ -1228,7 +1235,9 @@
// depth-first-search pauses until one of the work tasks returns. The memory usage of this
// algorithm is dominated by the set of seen CIDs and the 'dfs' stack is not expected to grow to
// more than 1000 elements (even when walking tens of millions of nodes).
let dfs = Arc::new(Mutex::new(vec![Ipld::Link(root_cid)]));
let dfs = Arc::new(Mutex::new(vec![Ipld::Link(
crate::utils::cid::cid_10_to_11(&root_cid),
)]));
let mut to_be_fetched = vec![];

// Loop until: No more items in `dfs` AND no running worker tasks.
@@ -1496,7 +1505,7 @@ impl RpcMethod<2> for StateReadState {
balance: actor.balance.clone().into(),
code: actor.code,
state: crate::rpc::types::ApiState {
builtin_actors: Ipld::Link(state),
builtin_actors: Ipld::Link(crate::utils::cid::cid_10_to_11(&state)),
},
})
}
2 changes: 1 addition & 1 deletion src/rpc/types/mod.rs
@@ -35,7 +35,7 @@ use fil_actors_shared::fvm_ipld_bitfield::BitField;
use fvm_ipld_encoding::RawBytes;
use fvm_shared4::piece::PaddedPieceSize;
use fvm_shared4::ActorID;
use libipld_core::ipld::Ipld;
use ipld_core::ipld::Ipld;
use num_bigint::BigInt;
use nunny::Vec as NonEmpty;
use schemars::JsonSchema;
6 changes: 3 additions & 3 deletions src/statediff/mod.rs
@@ -27,7 +27,7 @@ use fil_actor_interface::{
reward::State as RewardState, system::State as SystemState,
};
use fvm_ipld_blockstore::Blockstore;
use libipld_core::ipld::Ipld;
use ipld_core::ipld::Ipld;
use resolve::resolve_cids_recursive;
use serde::{Deserialize, Serialize};
use similar::{ChangeTag, TextDiff};
@@ -47,8 +47,8 @@ fn actor_to_resolved(
actor: &ActorState,
depth: Option<u64>,
) -> ActorStateResolved {
let resolved =
resolve_cids_recursive(bs, &actor.state, depth).unwrap_or(Ipld::Link(actor.state));
let resolved = resolve_cids_recursive(bs, &actor.state, depth)
.unwrap_or(Ipld::Link(crate::utils::cid::cid_10_to_11(&actor.state)));
ActorStateResolved {
state: resolved,
code: actor.code,
4 changes: 2 additions & 2 deletions src/statediff/resolve.rs
@@ -6,7 +6,7 @@ use cid::Cid;
use fvm_ipld_blockstore::Blockstore;
use fvm_ipld_encoding::CborStore;
use fvm_ipld_encoding::DAG_CBOR;
use libipld_core::ipld::Ipld;
use ipld_core::ipld::Ipld;

/// Resolves link to recursively resolved [`Ipld`] with no hash links.
pub fn resolve_cids_recursive<BS>(
@@ -47,7 +47,7 @@
}
Ipld::Link(cid) => {
if cid.codec() == DAG_CBOR {
if let Some(mut x) = bs.get_cbor(cid)? {
if let Some(mut x) = bs.get_cbor(&crate::utils::cid::cid_11_to_10(cid))? {
resolve_ipld(bs, &mut x, depth)?;
*ipld = x;
}
3 changes: 1 addition & 2 deletions src/tool/subcommands/api_cmd.rs
@@ -39,6 +39,7 @@ use fil_actors_shared::fvm_ipld_bitfield::BitField;
use fil_actors_shared::v10::runtime::DomainSeparationTag;
use futures::{stream::FuturesUnordered, StreamExt};
use fvm_ipld_blockstore::Blockstore;
use ipld_core::ipld::Ipld;
use itertools::Itertools as _;
use jsonrpsee::types::ErrorCode;
use libp2p::PeerId;
@@ -1946,8 +1947,6 @@ fn format_as_markdown(results: &[((&'static str, TestSummary, TestSummary), u32)
}

fn validate_message_lookup(req: rpc::Request<MessageLookup>) -> RpcTest {
use libipld_core::ipld::Ipld;

RpcTest::validate(req, |mut forest, mut lotus| {
// TODO(hanabi1224): https://github.com/ChainSafe/forest/issues/3784
forest.return_dec = Ipld::Null;
2 changes: 1 addition & 1 deletion src/tool/subcommands/benchmark_cmd.rs
@@ -34,7 +34,7 @@ pub enum BenchmarkCommands {
/// Snapshot input files (`.car.`, `.car.zst`, `.forest.car.zst`)
#[arg(required = true)]
snapshot_files: Vec<PathBuf>,
/// Whether or not we want to expect [`libipld_core::ipld::Ipld`] data for each block.
/// Whether or not we want to expect [`ipld_core::ipld::Ipld`] data for each block.
#[arg(long)]
inspect: bool,
},
12 changes: 12 additions & 0 deletions src/utils/cid/mod.rs
@@ -25,3 +25,15 @@ pub trait CidCborExt {
}

impl CidCborExt for Cid {}

/// A temporary utility for converting [`cid::Cid`] to [`cid_0_11::Cid`]
pub fn cid_10_to_11(cid: &cid::Cid) -> cid_0_11::Cid {
let bytes = cid.to_bytes();
cid_0_11::Cid::read_bytes(bytes.as_slice()).unwrap()
}

/// A temporary utility for converting [`cid_0_11::Cid`] to [`cid::Cid`]
pub fn cid_11_to_10(cid: &cid_0_11::Cid) -> cid::Cid {
let bytes = cid.to_bytes();
cid::Cid::read_bytes(bytes.as_slice()).unwrap()
}
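
A quick usage sketch of these helpers (not part of the commit): the multihash imports mirror the test code earlier in this diff, and the round trip works because `cid` 0.10 and 0.11 share the same binary encoding.

use cid::multihash::{Code::Blake2b256, MultihashDigest as _};
use fvm_ipld_encoding::DAG_CBOR;

fn cid_version_roundtrip() {
    let original = cid::Cid::new_v1(DAG_CBOR, Blake2b256.digest(b"example"));
    // Hop to the `cid` 0.11 type and back; the byte representation is unchanged.
    let as_0_11: cid_0_11::Cid = crate::utils::cid::cid_10_to_11(&original);
    let back: cid::Cid = crate::utils::cid::cid_11_to_10(&as_0_11);
    assert_eq!(original, back);
}
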
12 changes: 6 additions & 6 deletions src/utils/encoding/cid_de_cbor.rs
@@ -19,8 +19,8 @@ pub fn extract_cids(cbor_blob: &[u8]) -> anyhow::Result<Vec<Cid>> {
/// vector of [`Cid`].
struct CidVec(Vec<Cid>);

/// [`FilterCids`] traverses an [`libipld_core::ipld::Ipld`] tree, appending [`Cid`]s (and only CIDs) to a single vector.
/// This is much faster than constructing an [`libipld_core::ipld::Ipld`] tree and then performing the filtering.
/// [`FilterCids`] traverses an [`ipld_core::ipld::Ipld`] tree, appending [`Cid`]s (and only CIDs) to a single vector.
/// This is much faster than constructing an [`ipld_core::ipld::Ipld`] tree and then performing the filtering.
struct FilterCids<'a>(&'a mut Vec<Cid>);

impl<'de, 'a> DeserializeSeed<'de> for FilterCids<'a> {
@@ -187,7 +187,7 @@ mod test {
use cid::Cid;

use fvm_ipld_encoding::DAG_CBOR;
use libipld_core::ipld::Ipld;
use ipld_core::ipld::Ipld;
use quickcheck::{Arbitrary, Gen};
use quickcheck_macros::quickcheck;

@@ -204,14 +204,14 @@
match ipld {
// [`Cid`]s have to be valid in order to be decodable.
Ipld::Link(cid) => {
*cid = Cid::new_v1(
*cid = crate::utils::cid::cid_10_to_11(&Cid::new_v1(
DAG_CBOR,
Blake2b256.digest(&[
u8::arbitrary(g),
u8::arbitrary(g),
u8::arbitrary(g),
]),
)
))
}
Ipld::Map(map) => map.values_mut().for_each(|val| cleanup_ipld(val, g)),
Ipld::List(vec) => vec.iter_mut().for_each(|val| cleanup_ipld(val, g)),
@@ -238,7 +238,7 @@
fn deserialize_various_blobs(ipld: IpldWrapper) {
let ipld_to_cid = |ipld| {
if let Ipld::Link(cid) = ipld {
return Some(cid);
return Some(crate::utils::cid::cid_11_to_10(&cid));
}
None
};
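
For context, a hedged usage sketch of `extract_cids` (not part of the commit; the `crate::utils::encoding::extract_cids` path is an assumption about how the function is exported):

use cid::Cid;

fn links_in_block(dag_cbor_blob: &[u8]) -> anyhow::Result<Vec<Cid>> {
    // Scans the serialized DAG-CBOR directly and collects every CID it encounters,
    // without materializing an intermediate `Ipld` tree first.
    crate::utils::encoding::extract_cids(dag_cbor_blob)
}
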