Keep PendingComponents in da_checker during import_block (#5845)
Squashed commit of the following:

commit c051218
Author: ethDreamer <37123614+ethDreamer@users.noreply.github.com>
Date:   Fri May 31 13:38:19 2024 +0200

    Fix tests with DA checker new eviction policy (#34)

commit d75321b
Merge: 7c125b8 7f8b600
Author: realbigsean <seananderson33@GMAIL.com>
Date:   Mon May 27 19:52:41 2024 -0400

    Merge branch 'unstable' of https://github.com/sigp/lighthouse into time_in_da_checker

commit 7c125b8
Author: dapplion <35266934+dapplion@users.noreply.github.com>
Date:   Sat May 25 01:32:02 2024 +0200

    Keep PendingComponents in da_checker during import_block

commit 7136412
Author: dapplion <35266934+dapplion@users.noreply.github.com>
Date:   Sat May 25 01:01:23 2024 +0200

    Simplify BlockProcessStatus

commit dbcd7d1
Author: dapplion <35266934+dapplion@users.noreply.github.com>
Date:   Fri May 24 20:00:05 2024 +0200

    Ensure lookup sync checks caches correctly
jimmygchen committed Jun 3, 2024
1 parent fb790de commit 2ec5de3
Showing 3 changed files with 64 additions and 1 deletion.
14 changes: 14 additions & 0 deletions beacon_node/beacon_chain/src/beacon_chain.rs
@@ -3336,6 +3336,20 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
"payload_verification_handle",
)
.await??;

// Remove block components from da_checker AFTER completing block import. Then we can assert
// the following invariant:
// > A valid unfinalized block is either in fork-choice or da_checker.
//
// If we removed the block as soon as it became available, there would be a time window
// during `import_block` where the block is in neither cache. Consumers of the da_checker
// can tolerate the extended time a block may remain in the da_checker.
//
// If `import_block` errors (it can only fail with internal errors), the pending components
// will be pruned during data_availability_checker maintenance as finality advances.
self.data_availability_checker
.remove_pending_components(block_root);

Ok(AvailabilityProcessingStatus::Imported(block_root))
}

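The invariant above implies a simple consumer-side check: a block is known if it is in either cache. Below is a minimal sketch of that view; the types and names are illustrative stand-ins, not Lighthouse's actual fork-choice or da_checker APIs.

use std::collections::HashSet;

// Stand-in for the real 32-byte block root type.
type Hash256 = [u8; 32];

/// Consumer-side view of the invariant: with the new eviction policy, a valid
/// unfinalized block is always in fork choice or in the da_checker, so there
/// is no window during `import_block` where a lookup sees it in neither.
struct KnownBlocks {
    fork_choice: HashSet<Hash256>, // roots imported into fork choice
    da_checker: HashSet<Hash256>,  // roots with pending components
}

impl KnownBlocks {
    fn is_block_known(&self, root: &Hash256) -> bool {
        self.fork_choice.contains(root) || self.da_checker.contains(root)
    }
}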
5 changes: 5 additions & 0 deletions beacon_node/beacon_chain/src/data_availability_checker.rs
@@ -164,6 +164,11 @@ impl<T: BeaconChainTypes> DataAvailabilityChecker<T> {
.put_pending_executed_block(executed_block)
}

/// Removes the pending components for `block_root` from the availability cache,
/// to be called once the block has been fully imported.
pub fn remove_pending_components(&self, block_root: Hash256) {
self.availability_cache
.remove_pending_components(block_root)
}

/// Verifies kzg commitments for an RpcBlock, returns a `MaybeAvailableBlock` that may
/// include the fully available block.
///
45 changes: 45 additions & 1 deletion beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs
@@ -498,6 +498,21 @@ impl<T: BeaconChainTypes> Critical<T> {
}
}

/// Removes the pending components corresponding to the
/// `block_root`, if any exist.
pub fn remove_pending_components(&mut self, block_root: Hash256) {
match self.in_memory.pop_entry(&block_root) {
Some(_) => {}
None => {
// Not in memory; it may be in the store instead.
// We don't need to remove the data from the store: once the key is removed from
// `store_keys`, we won't go looking for it on disk. The maintenance thread will
// remove it from disk the next time it runs.
self.store_keys.remove(&block_root);
}
}
}

/// Returns the number of pending component entries in memory.
pub fn num_blocks(&self) -> usize {
self.in_memory.len()
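The removal above relies on the two-tier layout of `Critical<T>`: hot entries live in `in_memory` (an LRU), while entries that have overflowed to disk are tracked by key in `store_keys`. A simplified model of that removal path, with illustrative types rather than the real definitions:

use std::collections::{HashMap, HashSet};

type Hash256 = [u8; 32]; // stand-in for the real root type

// Simplified two-tier cache: payloads in memory, overflowed keys on disk.
struct TwoTier<V> {
    in_memory: HashMap<Hash256, V>, // hot tier (an LRU in the real code)
    store_keys: HashSet<Hash256>,   // keys whose payloads were spilled to disk
}

impl<V> TwoTier<V> {
    fn remove(&mut self, root: &Hash256) {
        if self.in_memory.remove(root).is_none() {
            // Not in memory: drop the key so no reader goes looking for the
            // payload on disk. The orphaned bytes are garbage-collected by
            // the periodic maintenance pass.
            self.store_keys.remove(root);
        }
    }
}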
@@ -607,6 +622,11 @@ impl<T: BeaconChainTypes> OverflowLRUCache<T> {
pending_components.merge_blobs(fixed_blobs);

if pending_components.is_available() {
write_lock.put_pending_components(
block_root,
pending_components.clone(),
&self.overflow_store,
)?;
// No need to hold the write lock anymore
drop(write_lock);
pending_components.make_available(|diet_block| {
@@ -646,6 +666,11 @@ impl<T: BeaconChainTypes> OverflowLRUCache<T> {

// Check if we have all components and entire set is consistent.
if pending_components.is_available() {
write_lock.put_pending_components(
block_root,
pending_components.clone(),
&self.overflow_store,
)?;
// No need to hold the write lock anymore
drop(write_lock);
pending_components.make_available(|diet_block| {
@@ -661,6 +686,10 @@
}
}

/// Removes the pending components for `block_root` from the cache.
pub fn remove_pending_components(&self, block_root: Hash256) {
self.critical.write().remove_pending_components(block_root);
}

/// Write all in-memory objects to disk.
pub fn write_all_to_disk(&self) -> Result<(), AvailabilityCheckError> {
let maintenance_lock = self.maintenance_lock.lock();
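The `put_pending_components` calls added before `make_available` above are the core of this change: when a block becomes available, its components are written back into the cache rather than popped, and only the post-import call to `remove_pending_components` evicts them. A condensed sketch of that lifecycle, using hypothetical types rather than the real `OverflowLRUCache` API:

use std::collections::HashMap;
use std::sync::Mutex;

type Hash256 = [u8; 32];

#[derive(Default)]
struct Pending {
    have_block: bool,
    blobs_seen: usize,
    blobs_expected: usize,
}

impl Pending {
    fn is_available(&self) -> bool {
        self.have_block && self.blobs_seen == self.blobs_expected
    }
}

struct Cache {
    critical: Mutex<HashMap<Hash256, Pending>>,
}

impl Cache {
    /// Merge a component into the pending set for `root`; returns true if
    /// the block just became available.
    fn put_component(&self, root: Hash256, merge: impl FnOnce(&mut Pending)) -> bool {
        let mut lock = self.critical.lock().unwrap();
        // Pop the existing entry (or start a fresh one) and merge into it.
        let mut pending = lock.remove(&root).unwrap_or_default();
        merge(&mut pending);
        let available = pending.is_available();
        // Write the entry back even when it is available: it must survive in
        // the cache until import_block has finished.
        lock.insert(root, pending);
        // Drop the lock before any expensive "make available" work.
        drop(lock);
        available
    }

    /// Called only after import_block completes successfully.
    fn remove_pending_components(&self, root: &Hash256) {
        self.critical.lock().unwrap().remove(root);
    }
}

If `import_block` then fails, the entry simply stays in the cache until the maintenance pass prunes it as finality advances, matching the comment in beacon_chain.rs above.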
@@ -1195,10 +1224,17 @@ mod test {
matches!(availability, Availability::Available(_)),
"block doesn't have blobs, should be available"
);
assert_eq!(
cache.critical.read().in_memory.len(),
1,
"cache should still have block as it hasn't been imported yet"
);
// remove the pending components to simulate a successful import
cache.remove_pending_components(root);
assert_eq!(
cache.critical.read().in_memory.len(),
0,
"cache should be empty because we don't have blobs"
"cache should be empty now that block has been imported"
);
} else {
assert!(
@@ -1263,6 +1299,12 @@ mod test {
"block should be available: {:?}",
availability
);
assert!(
cache.critical.read().in_memory.len() == 1,
"cache should still have available block until import"
);
// remove the pending components to simulate a successful import
cache.remove_pending_components(root);
assert!(
cache.critical.read().in_memory.is_empty(),
"cache should be empty now that all components available"
@@ -1378,6 +1420,8 @@ mod test {
.expect("should put blob");
if blob_index == expected_blobs - 1 {
assert!(matches!(availability, Availability::Available(_)));
// remove the block from the cache to simulate import
cache.remove_pending_components(roots[0]);
} else {
// the first block should be brought back into memory
assert!(
