Implement Storable for LeafNode (#376)
richardpringle authored Nov 29, 2023
1 parent 31a57de commit e36b312
Showing 3 changed files with 154 additions and 73 deletions.
79 changes: 10 additions & 69 deletions firewood/src/merkle/node.rs
@@ -30,7 +30,7 @@ pub use partial_path::PartialPath;
use crate::merkle::to_nibble_array;
use crate::nibbles::Nibbles;

use super::{from_nibbles, TrieHash, TRIE_HASH_LEN};
use super::{TrieHash, TRIE_HASH_LEN};

bitflags! {
// should only ever be the size of a nibble
@@ -462,48 +462,8 @@ impl Storable for Node {
}

NodeTypeId::Leaf => {
let leaf_header_size = 1 + 4;
let node_raw = mem.get_view(addr + Meta::SIZE, leaf_header_size).ok_or(
ShaleError::InvalidCacheView {
offset: addr + Meta::SIZE,
size: leaf_header_size,
},
)?;

let mut cur = Cursor::new(node_raw.as_deref());
let mut buff = [0; 4];
cur.read_exact(&mut buff[..1])?;

let path_len = buff[0] as u64;

cur.read_exact(&mut buff)?;

let data_len = u32::from_le_bytes(buff) as u64;
let remainder = mem
.get_view(
addr + Meta::SIZE + leaf_header_size as usize,
path_len + data_len,
)
.ok_or(ShaleError::InvalidCacheView {
offset: addr + Meta::SIZE + leaf_header_size as usize,
size: path_len + data_len,
})?;

let nibbles: Vec<_> = remainder
.as_deref()
.into_iter()
.take(path_len as usize)
.flat_map(to_nibble_array)
.collect();

let (path, _) = PartialPath::decode(&nibbles);
let data = Data(remainder.as_deref()[path_len as usize..].to_vec());

let node = Self::new_from_hash(
root_hash,
is_encoded_longer_than_hash_len,
NodeType::Leaf(LeafNode { path, data }),
);
let inner = NodeType::Leaf(LeafNode::deserialize(offset, mem)?);
let node = Self::new_from_hash(root_hash, is_encoded_longer_than_hash_len, inner);

Ok(node)
}
@@ -517,15 +477,8 @@ impl Storable for Node {
// TODO: add path
n.serialized_len()
}
NodeType::Extension(n) => {
1 + 8
+ n.path.serialized_len()
+ match n.chd_encoded() {
Some(v) => 1 + v.len() as u64,
None => 1,
}
}
NodeType::Leaf(n) => 1 + 4 + n.path.serialized_len() + n.data.len() as u64,
NodeType::Extension(n) => n.serialized_len(),
NodeType::Leaf(n) => n.serialized_len(),
}
}

@@ -556,7 +509,7 @@ impl Storable for Node {
match &self.inner {
NodeType::Branch(n) => {
// TODO: add path
cur.write_all(&[type_id::NodeTypeId::Branch as u8]).unwrap();
cur.write_all(&[type_id::NodeTypeId::Branch as u8])?;

let pos = cur.position() as usize;

@@ -566,29 +519,17 @@ impl Storable for Node {
NodeType::Extension(n) => {
cur.write_all(&[type_id::NodeTypeId::Extension as u8])?;

let path: Vec<u8> = from_nibbles(&n.path.encode(false)).collect();

cur.write_all(&[path.len() as u8])?;
cur.write_all(&n.child.to_le_bytes())?;
cur.write_all(&path)?;

if let Some(encoded) = n.chd_encoded() {
cur.write_all(&[encoded.len() as u8])?;
cur.write_all(encoded)?;
}
let pos = cur.position() as usize;

Ok(())
n.serialize(&mut cur.get_mut()[pos..])
}

NodeType::Leaf(n) => {
cur.write_all(&[type_id::NodeTypeId::Leaf as u8])?;

let path: Vec<u8> = from_nibbles(&n.path.encode(true)).collect();
let pos = cur.position() as usize;

cur.write_all(&[path.len() as u8])?;
cur.write_all(&(n.data.len() as u32).to_le_bytes())?;
cur.write_all(&path)?;
cur.write_all(&n.data).map_err(ShaleError::Io)
n.serialize(&mut cur.get_mut()[pos..])
}
}
}
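For orientation, the node.rs change above replaces inline byte-twiddling with delegation: the node writes its one-byte type tag and then hands the rest of the destination buffer to the variant's own `Storable::serialize`. The following is a minimal, standalone sketch of that pattern under assumed names — `LEAF_TAG`, `Payload`, and `serialize_node` are illustrative stand-ins, not firewood's real API, and the tag value is arbitrary.

```rust
use std::io::{Cursor, Write};

const LEAF_TAG: u8 = 2; // illustrative tag value; the real values live in type_id::NodeTypeId

struct Payload {
    bytes: Vec<u8>,
}

impl Payload {
    // Stand-in for the variant-level serialize the node delegates to.
    fn serialize(&self, to: &mut [u8]) -> std::io::Result<()> {
        let mut cursor = Cursor::new(to);
        cursor.write_all(&self.bytes)
    }
}

// Write the one-byte type tag, then hand the remainder of the buffer to the variant.
fn serialize_node(payload: &Payload, to: &mut [u8]) -> std::io::Result<()> {
    let mut cur = Cursor::new(to);
    cur.write_all(&[LEAF_TAG])?;
    let pos = cur.position() as usize;
    payload.serialize(&mut cur.get_mut()[pos..])
}

fn main() -> std::io::Result<()> {
    let payload = Payload { bytes: vec![0xAA, 0xBB] };
    let mut buf = vec![0u8; 3];
    serialize_node(&payload, &mut buf)?;
    assert_eq!(buf, [LEAF_TAG, 0xAA, 0xBB]);
    Ok(())
}
```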
53 changes: 51 additions & 2 deletions firewood/src/merkle/node/extension.rs
@@ -6,9 +6,15 @@ use bincode::Options;
use super::{Encoded, Node};
use crate::{
merkle::{from_nibbles, PartialPath, TRIE_HASH_LEN},
shale::{DiskAddress, ShaleStore},
shale::{DiskAddress, ShaleStore, Storable},
};
use std::fmt::{Debug, Error as FmtError, Formatter};
use std::{
fmt::{Debug, Error as FmtError, Formatter},
io::{Cursor, Write},
mem::size_of,
};

type DataLen = u8;

#[derive(PartialEq, Eq, Clone)]
pub struct ExtNode {
@@ -87,3 +93,46 @@ impl ExtNode {
&mut self.child_encoded
}
}

impl Storable for ExtNode {
fn serialized_len(&self) -> u64 {
let path_len_size = size_of::<DataLen>() as u64;
let path_len = self.path.serialized_len();
let child_len = DiskAddress::MSIZE;
let encoded_len_size = size_of::<DataLen>() as u64;
let encoded_len = self
.child_encoded
.as_ref()
.map(|v| v.len() as u64)
.unwrap_or(0);

path_len_size + path_len + child_len + encoded_len_size + encoded_len
}

fn serialize(&self, to: &mut [u8]) -> Result<(), crate::shale::ShaleError> {
let mut cursor = Cursor::new(to);

let path: Vec<u8> = from_nibbles(&self.path.encode(false)).collect();

cursor.write_all(&[path.len() as DataLen])?;
cursor.write_all(&self.child.to_le_bytes())?;
cursor.write_all(&path)?;

if let Some(encoded) = self.chd_encoded() {
cursor.write_all(&[encoded.len() as DataLen])?;
cursor.write_all(encoded)?;
}

Ok(())
}

fn deserialize<T: crate::shale::CachedStore>(
_addr: usize,
_mem: &T,
) -> Result<Self, crate::shale::ShaleError>
where
Self: Sized,
{
todo!()
}
}
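For reference, `ExtNode::serialize` above implies this byte layout: a one-byte path length, the child's disk address, the path bytes, and, when the encoded child is present, a one-byte length followed by the encoded bytes. The sketch below reproduces that framing standalone; `encode_ext` is an illustrative name and the 8-byte little-endian address width is an assumption standing in for `DiskAddress::MSIZE`.

```rust
use std::io::{Cursor, Write};

// Assumed layout: [path_len: u8][child: u64 LE][path...][encoded_len: u8][encoded...]?
fn encode_ext(path: &[u8], child: u64, child_encoded: Option<&[u8]>) -> std::io::Result<Vec<u8>> {
    let mut cursor = Cursor::new(Vec::new());

    cursor.write_all(&[path.len() as u8])?;  // path length prefix
    cursor.write_all(&child.to_le_bytes())?; // child disk address (assumed 8 bytes)
    cursor.write_all(path)?;                 // path bytes

    if let Some(encoded) = child_encoded {
        cursor.write_all(&[encoded.len() as u8])?; // encoded-child length prefix
        cursor.write_all(encoded)?;
    }

    Ok(cursor.into_inner())
}

fn main() -> std::io::Result<()> {
    let bytes = encode_ext(&[0x0a, 0x0b], 0x1000, Some(&[0xde, 0xad]))?;
    // 1 (path len) + 8 (child) + 2 (path) + 1 (encoded len) + 2 (encoded) = 14
    assert_eq!(bytes.len(), 14);
    Ok(())
}
```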
95 changes: 93 additions & 2 deletions firewood/src/merkle/node/leaf.rs
@@ -1,15 +1,25 @@
// Copyright (C) 2023, Ava Labs, Inc. All rights reserved.
// See the file LICENSE.md for licensing terms.

use std::fmt::{Debug, Error as FmtError, Formatter};
use std::{
fmt::{Debug, Error as FmtError, Formatter},
io::{Cursor, Read, Write},
mem::size_of,
};

use bincode::Options;

use super::{Data, Encoded};
use crate::merkle::{from_nibbles, PartialPath};
use crate::{
merkle::{from_nibbles, to_nibble_array, PartialPath},
shale::{ShaleError::InvalidCacheView, Storable},
};

pub const SIZE: usize = 2;

type PathLen = u8;
type DataLen = u32;

#[derive(PartialEq, Eq, Clone)]
pub struct LeafNode {
pub(crate) path: PartialPath,
@@ -23,6 +33,9 @@ impl Debug for LeafNode {
}

impl LeafNode {
const PATH_LEN_SIZE: u64 = size_of::<PathLen>() as u64;
const DATA_LEN_SIZE: u64 = size_of::<DataLen>() as u64;

pub fn new<P: Into<PartialPath>, D: Into<Data>>(path: P, data: D) -> Self {
Self {
path: path.into(),
@@ -51,6 +64,84 @@ impl LeafNode {
}
}

impl Storable for LeafNode {
fn serialized_len(&self) -> u64 {
let path_len_size = size_of::<PathLen>() as u64;
let path_len = self.path.serialized_len();
let data_len_size = size_of::<DataLen>() as u64;
let data_len = self.data.len() as u64;

path_len_size + path_len + data_len_size + data_len
}

fn serialize(&self, to: &mut [u8]) -> Result<(), crate::shale::ShaleError> {
let mut cursor = Cursor::new(to);

let path: Vec<u8> = from_nibbles(&self.path.encode(true)).collect();

cursor.write_all(&[path.len() as PathLen])?;

let data_len = self.data.len() as DataLen;
cursor.write_all(&data_len.to_le_bytes())?;

cursor.write_all(&path)?;
cursor.write_all(&self.data)?;

Ok(())
}

fn deserialize<T: crate::shale::CachedStore>(
mut offset: usize,
mem: &T,
) -> Result<Self, crate::shale::ShaleError>
where
Self: Sized,
{
let header_size = Self::PATH_LEN_SIZE + Self::DATA_LEN_SIZE;

let node_header_raw = mem
.get_view(offset, header_size)
.ok_or(InvalidCacheView {
offset,
size: header_size,
})?
.as_deref();

offset += header_size as usize;

let mut cursor = Cursor::new(node_header_raw);
let mut buf = [0u8; Self::DATA_LEN_SIZE as usize];

let path_len = {
let buf = &mut buf[..Self::PATH_LEN_SIZE as usize];
cursor.read_exact(buf)?;
buf[0] as u64
};

let data_len = {
cursor.read_exact(buf.as_mut())?;
DataLen::from_le_bytes(buf) as u64
};

let size = path_len + data_len;
let remainder = mem
.get_view(offset, size)
.ok_or(InvalidCacheView { offset, size })?
.as_deref();

let (path, data) = remainder.split_at(path_len as usize);

let path = {
let nibbles: Vec<u8> = path.iter().copied().flat_map(to_nibble_array).collect();
PartialPath::decode(&nibbles).0
};

let data = Data(data.to_vec());

Ok(Self::new(path, data))
}
}

#[cfg(test)]
mod tests {
use super::*;
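The new `Storable` impl for `LeafNode` pins down the leaf wire format: a one-byte path length (`PathLen = u8`), a four-byte little-endian data length (`DataLen = u32`), then the path bytes, then the data bytes. Below is a standalone round-trip sketch of that framing; it deliberately skips the nibble packing done by `PartialPath::encode`/`to_nibble_array`, and `encode_leaf`/`decode_leaf` are illustrative names, not firewood's API.

```rust
use std::io::{Cursor, Read, Write};

// [path_len: u8][data_len: u32 LE][path...][data...]
fn encode_leaf(path: &[u8], data: &[u8]) -> std::io::Result<Vec<u8>> {
    let mut cursor = Cursor::new(Vec::new());
    cursor.write_all(&[path.len() as u8])?;
    cursor.write_all(&(data.len() as u32).to_le_bytes())?;
    cursor.write_all(path)?;
    cursor.write_all(data)?;
    Ok(cursor.into_inner())
}

fn decode_leaf(bytes: &[u8]) -> std::io::Result<(Vec<u8>, Vec<u8>)> {
    let mut cursor = Cursor::new(bytes);

    let mut path_len = [0u8; 1];
    cursor.read_exact(&mut path_len)?;

    let mut data_len = [0u8; 4];
    cursor.read_exact(&mut data_len)?;

    let mut path = vec![0u8; path_len[0] as usize];
    cursor.read_exact(&mut path)?;

    let mut data = vec![0u8; u32::from_le_bytes(data_len) as usize];
    cursor.read_exact(&mut data)?;

    Ok((path, data))
}

fn main() -> std::io::Result<()> {
    let bytes = encode_leaf(&[0x01, 0x02, 0x03], b"value")?;
    let (path, data) = decode_leaf(&bytes)?;
    assert_eq!(path, [0x01, 0x02, 0x03]);
    assert_eq!(data, b"value");
    Ok(())
}
```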
