Finish deserialization of extension-node (#377)
richardpringle authored Nov 30, 2023
1 parent e36b312 commit 8799910
Showing 4 changed files with 109 additions and 94 deletions.
82 changes: 3 additions & 79 deletions firewood/src/merkle/node.rs
@@ -9,7 +9,7 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
use sha3::{Digest, Keccak256};
use std::{
fmt::Debug,
io::{Cursor, Read, Write},
io::{Cursor, Write},
mem::size_of,
sync::{
atomic::{AtomicBool, Ordering},
@@ -27,7 +27,6 @@ pub use extension::ExtNode;
pub use leaf::{LeafNode, SIZE as LEAF_NODE_SIZE};
pub use partial_path::PartialPath;

use crate::merkle::to_nibble_array;
use crate::nibbles::Nibbles;

use super::{TrieHash, TRIE_HASH_LEN};
@@ -339,9 +338,7 @@ mod type_id {
use type_id::NodeTypeId;

impl Storable for Node {
fn deserialize<T: CachedStore>(addr: usize, mem: &T) -> Result<Self, ShaleError> {
let mut offset = addr;

fn deserialize<T: CachedStore>(mut offset: usize, mem: &T) -> Result<Self, ShaleError> {
let meta_raw =
mem.get_view(offset, Meta::SIZE as u64)
.ok_or(ShaleError::InvalidCacheView {
@@ -382,80 +379,7 @@ impl Storable for Node {
}

NodeTypeId::Extension => {
let ext_header_size = 1 + 8;

let node_raw = mem.get_view(addr + Meta::SIZE, ext_header_size).ok_or(
ShaleError::InvalidCacheView {
offset: addr + Meta::SIZE,
size: ext_header_size,
},
)?;

let mut cur = Cursor::new(node_raw.as_deref());
let mut buff = [0; 8];

cur.read_exact(&mut buff[..1])?;
let path_len = buff[0] as u64;

cur.read_exact(&mut buff)?;
let ptr = u64::from_le_bytes(buff);

let nibbles: Vec<u8> = mem
.get_view(addr + Meta::SIZE + ext_header_size as usize, path_len)
.ok_or(ShaleError::InvalidCacheView {
offset: addr + Meta::SIZE + ext_header_size as usize,
size: path_len,
})?
.as_deref()
.into_iter()
.flat_map(to_nibble_array)
.collect();

let (path, _) = PartialPath::decode(&nibbles);

let mut buff = [0_u8; 1];

let encoded_len_raw = mem
.get_view(
addr + Meta::SIZE + ext_header_size as usize + path_len as usize,
1,
)
.ok_or(ShaleError::InvalidCacheView {
offset: addr + Meta::SIZE + ext_header_size as usize + path_len as usize,
size: 1,
})?;

cur = Cursor::new(encoded_len_raw.as_deref());
cur.read_exact(&mut buff)?;

let encoded_len = buff[0] as u64;

let encoded: Option<Vec<u8>> = if encoded_len != 0 {
let emcoded_raw = mem
.get_view(
addr + Meta::SIZE + ext_header_size as usize + path_len as usize + 1,
encoded_len,
)
.ok_or(ShaleError::InvalidCacheView {
offset: addr
+ Meta::SIZE
+ ext_header_size as usize
+ path_len as usize
+ 1,
size: encoded_len,
})?;

Some(emcoded_raw.as_deref()[0..].to_vec())
} else {
None
};

let inner = NodeType::Extension(ExtNode {
path,
child: DiskAddress::from(ptr as usize),
child_encoded: encoded,
});

let inner = NodeType::Extension(ExtNode::deserialize(offset, mem)?);
let node = Self::new_from_hash(root_hash, is_encoded_longer_than_hash_len, inner);

Ok(node)
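
The rewritten arm above replaces the hand-rolled parsing of the extension node's bytes (path length, child pointer, nibbles, and optional pre-encoded child) with a single call into ExtNode's own Storable implementation, mirroring how the other node variants are handled. Note that the old code computed every view from addr + Meta::SIZE, while the new code passes offset, which has presumably already been advanced past the metadata by the shared header parsing, so both name the same position. A minimal sketch of the resulting arm, with the surrounding bookkeeping omitted:

// Sketch only: simplified extension arm of Node::deserialize after this change.
// Assumes `offset` points at the first byte after the node metadata.
NodeTypeId::Extension => {
    // ExtNode::deserialize now owns the byte-level parsing of its layout.
    let inner = NodeType::Extension(ExtNode::deserialize(offset, mem)?);
    Ok(Self::new_from_hash(root_hash, is_encoded_longer_than_hash_len, inner))
}
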
12 changes: 10 additions & 2 deletions firewood/src/merkle/node/branch.rs
@@ -288,13 +288,21 @@ impl Storable for BranchNode {
for child in &mut children_encoded {
const ENCODED_CHILD_LEN_SIZE: u64 = size_of::<EncodedChildLen>() as u64;

let len = mem
let len_raw = mem
.get_view(addr, ENCODED_CHILD_LEN_SIZE)
.ok_or(ShaleError::InvalidCacheView {
offset: addr,
size: ENCODED_CHILD_LEN_SIZE,
})?
.as_deref()[0] as u64;
.as_deref();

let mut cursor = Cursor::new(len_raw);

let len = {
let mut buf = [0; ENCODED_CHILD_LEN_SIZE as usize];
cursor.read_exact(buf.as_mut())?;
EncodedChildLen::from_le_bytes(buf) as u64
};

addr += ENCODED_CHILD_LEN_SIZE as usize;

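The branch.rs hunk swaps a direct as_deref()[0] index for a Cursor plus EncodedChildLen::from_le_bytes, so the read fails cleanly on truncated input and stays correct even if EncodedChildLen is ever widened beyond one byte. A self-contained sketch of the pattern, using a hypothetical u16 length type purely for illustration:

use std::io::{Cursor, Read};

// Hypothetical stand-in for EncodedChildLen; the real crate defines its own alias.
type LenType = u16;

fn read_len_prefix(bytes: &[u8]) -> std::io::Result<u64> {
    let mut cursor = Cursor::new(bytes);
    let mut buf = [0u8; std::mem::size_of::<LenType>()];
    // read_exact returns an error on short input instead of panicking on an
    // out-of-bounds index, and from_le_bytes handles multi-byte widths.
    cursor.read_exact(&mut buf)?;
    Ok(LenType::from_le_bytes(buf) as u64)
}

fn main() -> std::io::Result<()> {
    // 0x002a little-endian is 42; the trailing byte is unread payload.
    assert_eq!(read_len_prefix(&[0x2a, 0x00, 0xff])?, 42);
    Ok(())
}
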
101 changes: 92 additions & 9 deletions firewood/src/merkle/node/extension.rs
@@ -5,15 +5,16 @@ use bincode::Options;

use super::{Encoded, Node};
use crate::{
merkle::{from_nibbles, PartialPath, TRIE_HASH_LEN},
shale::{DiskAddress, ShaleStore, Storable},
merkle::{from_nibbles, to_nibble_array, PartialPath, TRIE_HASH_LEN},
shale::{DiskAddress, ShaleError::InvalidCacheView, ShaleStore, Storable},
};
use std::{
fmt::{Debug, Error as FmtError, Formatter},
io::{Cursor, Write},
io::{Cursor, Read, Write},
mem::size_of,
};

type PathLen = u8;
type DataLen = u8;

#[derive(PartialEq, Eq, Clone)]
@@ -35,6 +36,9 @@ impl Debug for ExtNode {
}

impl ExtNode {
const PATH_LEN_SIZE: u64 = size_of::<PathLen>() as u64;
const DATA_LEN_SIZE: u64 = size_of::<DataLen>() as u64;

pub(super) fn encode<S: ShaleStore<Node>>(&self, store: &S) -> Vec<u8> {
let mut list = <[Encoded<Vec<u8>>; 2]>::default();
list[0] = Encoded::Data(
@@ -96,10 +100,13 @@ impl ExtNode {

impl Storable for ExtNode {
fn serialized_len(&self) -> u64 {
let path_len_size = size_of::<DataLen>() as u64;
let path_len_size = Self::PATH_LEN_SIZE;
let path_len = self.path.serialized_len();
let child_len = DiskAddress::MSIZE;
let encoded_len_size = size_of::<DataLen>() as u64;
// TODO:
// this seems wrong to always include this byte even if there isn't a child
// but it matches the original implementation
let encoded_len_size = Self::DATA_LEN_SIZE;
let encoded_len = self
.child_encoded
.as_ref()
@@ -114,7 +121,7 @@

let path: Vec<u8> = from_nibbles(&self.path.encode(false)).collect();

cursor.write_all(&[path.len() as DataLen])?;
cursor.write_all(&[path.len() as PathLen])?;
cursor.write_all(&self.child.to_le_bytes())?;
cursor.write_all(&path)?;

@@ -127,12 +134,88 @@
}

fn deserialize<T: crate::shale::CachedStore>(
_addr: usize,
_mem: &T,
mut offset: usize,
mem: &T,
) -> Result<Self, crate::shale::ShaleError>
where
Self: Sized,
{
todo!()
let header_size = Self::PATH_LEN_SIZE + DiskAddress::MSIZE;

let path_and_disk_address = mem
.get_view(offset, header_size)
.ok_or(InvalidCacheView {
offset,
size: header_size,
})?
.as_deref();

offset += header_size as usize;

let mut cursor = Cursor::new(path_and_disk_address);
let mut buf = [0u8; DiskAddress::MSIZE as usize];

let path_len = {
let buf = &mut buf[..Self::PATH_LEN_SIZE as usize];
cursor.read_exact(buf)?;
buf[0] as u64
};

let disk_address = {
cursor.read_exact(buf.as_mut())?;
DiskAddress::from(u64::from_le_bytes(buf) as usize)
};

let path = mem
.get_view(offset, path_len)
.ok_or(InvalidCacheView {
offset,
size: path_len,
})?
.as_deref();

offset += path_len as usize;

let path: Vec<u8> = path.into_iter().flat_map(to_nibble_array).collect();

let path = PartialPath::decode(&path).0;

let encoded_len_raw = mem
.get_view(offset, Self::DATA_LEN_SIZE)
.ok_or(InvalidCacheView {
offset,
size: Self::DATA_LEN_SIZE,
})?
.as_deref();

offset += Self::DATA_LEN_SIZE as usize;

let mut cursor = Cursor::new(encoded_len_raw);

let encoded_len = {
let mut buf = [0u8; Self::DATA_LEN_SIZE as usize];
cursor.read_exact(buf.as_mut())?;
DataLen::from_le_bytes(buf) as u64
};

let encoded = if encoded_len != 0 {
let encoded = mem
.get_view(offset, encoded_len)
.ok_or(InvalidCacheView {
offset,
size: encoded_len,
})?
.as_deref();

encoded.into()
} else {
None
};

Ok(ExtNode {
path,
child: disk_address,
child_encoded: encoded,
})
}
}
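
Taken together, the new ExtNode::deserialize walks a layout of: one path-length byte, an 8-byte little-endian DiskAddress, the raw path bytes (expanded into nibbles and decoded as a PartialPath), one encoded-child-length byte, and finally that many bytes of pre-encoded child data (a length of zero meaning none). The following is a dependency-free sketch of that framing with invented names and without the nibble expansion; it is an illustration of the byte layout, not the crate's API:

// Hypothetical illustration of the extension-node byte layout.
struct RawExtNode {
    path: Vec<u8>,                  // raw path bytes, before nibble expansion
    child_addr: u64,                // little-endian child address
    child_encoded: Option<Vec<u8>>, // optional pre-encoded child
}

fn parse_ext_node(bytes: &[u8]) -> Option<RawExtNode> {
    // 1-byte path length, then an 8-byte little-endian child address.
    let path_len = *bytes.first()? as usize;
    let child_addr = u64::from_le_bytes(bytes.get(1..9)?.try_into().ok()?);
    let path = bytes.get(9..9 + path_len)?.to_vec();
    // 1-byte encoded-child length; zero means there is no child data.
    let enc_len = *bytes.get(9 + path_len)? as usize;
    let child_encoded = if enc_len != 0 {
        Some(bytes.get(9 + path_len + 1..9 + path_len + 1 + enc_len)?.to_vec())
    } else {
        None
    };
    Some(RawExtNode { path, child_addr, child_encoded })
}

fn main() {
    // path = [0xab], child address = 7, no pre-encoded child (length 0).
    let bytes = [1, 7, 0, 0, 0, 0, 0, 0, 0, 0xab, 0];
    let node = parse_ext_node(&bytes).expect("well-formed input");
    assert_eq!(node.child_addr, 7);
    assert_eq!(node.path, vec![0xab]);
    assert!(node.child_encoded.is_none());
}
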
8 changes: 4 additions & 4 deletions firewood/src/merkle/node/leaf.rs
@@ -110,15 +110,15 @@ impl Storable for LeafNode {
offset += header_size as usize;

let mut cursor = Cursor::new(node_header_raw);
let mut buf = [0u8; Self::DATA_LEN_SIZE as usize];

let path_len = {
let buf = &mut buf[..Self::PATH_LEN_SIZE as usize];
cursor.read_exact(buf)?;
buf[0] as u64
let mut buf = [0u8; Self::PATH_LEN_SIZE as usize];
cursor.read_exact(buf.as_mut())?;
PathLen::from_le_bytes(buf) as u64
};

let data_len = {
let mut buf = [0u8; Self::DATA_LEN_SIZE as usize];
cursor.read_exact(buf.as_mut())?;
DataLen::from_le_bytes(buf) as u64
};
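
The leaf.rs hunk makes the same change in miniature: each length now gets its own correctly sized buffer and is decoded with PathLen::from_le_bytes / DataLen::from_le_bytes rather than by indexing byte 0. With the current single-byte aliases the two forms agree, while the from_le_bytes form (with the buffer sized from the alias) keeps reading correctly if an alias is later widened. A tiny sketch of the equivalence, assuming PathLen = u8 as in the file above:

type PathLen = u8; // matches the single-byte alias in leaf.rs

fn main() {
    let buf = [7u8];
    assert_eq!(buf[0] as u64, PathLen::from_le_bytes(buf) as u64);
}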
