Skip to content

Commit

Permalink
Compute columns in post-PeerDAS checkpoint sync (#6760)
Browse files Browse the repository at this point in the history
Addresses #6026.

Post-PeerDAS, the DB expects to have data columns for the finalized block.


  Instead of forcing the user to submit the columns, this PR computes them from the blobs, which we can already fetch from the checkpointz server or via the existing CLI options.

Note 1: (EDIT) Pruning concern addressed

Note 2: I have not tested this feature

Note 3: @michaelsproul — an alternative, as I recall, is to not require the blobs/columns at this point and instead expect backfill to populate the finalized block's data.
  • Loading branch information
dapplion authored Jan 31, 2025
1 parent e4183f8 commit 027bb97
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 3 deletions.
28 changes: 25 additions & 3 deletions beacon_node/beacon_chain/src/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ use crate::fork_choice_signal::ForkChoiceSignalTx;
use crate::fork_revert::{reset_fork_choice_to_finalization, revert_to_fork_boundary};
use crate::graffiti_calculator::{GraffitiCalculator, GraffitiOrigin};
use crate::head_tracker::HeadTracker;
use crate::kzg_utils::blobs_to_data_column_sidecars;
use crate::light_client_server_cache::LightClientServerCache;
use crate::migrate::{BackgroundMigrator, MigratorConfig};
use crate::observed_data_sidecars::ObservedDataSidecars;
Expand Down Expand Up @@ -562,9 +563,30 @@ where
.put_block(&weak_subj_block_root, weak_subj_block.clone())
.map_err(|e| format!("Failed to store weak subjectivity block: {e:?}"))?;
if let Some(blobs) = weak_subj_blobs {
store
.put_blobs(&weak_subj_block_root, blobs)
.map_err(|e| format!("Failed to store weak subjectivity blobs: {e:?}"))?;
if self
.spec
.is_peer_das_enabled_for_epoch(weak_subj_block.epoch())
{
// After PeerDAS recompute columns from blobs to not force the checkpointz server
// into exposing another route.
let blobs = blobs
.iter()
.map(|blob_sidecar| &blob_sidecar.blob)
.collect::<Vec<_>>();
let data_columns =
blobs_to_data_column_sidecars(&blobs, &weak_subj_block, &self.kzg, &self.spec)
.map_err(|e| {
format!("Failed to compute weak subjectivity data_columns: {e:?}")
})?;
// TODO(das): only persist the columns under custody
store
.put_data_columns(&weak_subj_block_root, data_columns)
.map_err(|e| format!("Failed to store weak subjectivity data_column: {e:?}"))?;
} else {
store
.put_blobs(&weak_subj_block_root, blobs)
.map_err(|e| format!("Failed to store weak subjectivity blobs: {e:?}"))?;
}
}

// Stage the database's metadata fields for atomic storage when `build` is called.
Expand Down
18 changes: 18 additions & 0 deletions beacon_node/store/src/hot_cold_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -863,6 +863,24 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
));
}

/// Persist a list of data column sidecars for the given block root.
///
/// Each sidecar is SSZ-encoded and written to the blobs database under a
/// per-column key derived from the block root and the column index, then
/// inserted into the in-memory block cache.
///
/// Returns an error if any underlying database write fails; columns written
/// before the failure are not rolled back (writes are per-column, not atomic).
pub fn put_data_columns(
    &self,
    block_root: &Hash256,
    data_columns: DataColumnSidecarList<E>,
) -> Result<(), Error> {
    for column in data_columns {
        // Derive the storage key and serialize before touching the DB so the
        // write call stays a single, readable statement.
        let key = get_data_column_key(block_root, &column.index);
        let bytes = column.as_ssz_bytes();
        self.blobs_db
            .put_bytes(DBColumn::BeaconDataColumn, &key, &bytes)?;
        // Cache the column so subsequent reads can skip the database.
        self.block_cache.lock().put_data_column(*block_root, column);
    }
    Ok(())
}

pub fn data_columns_as_kv_store_ops(
&self,
block_root: &Hash256,
Expand Down

0 comments on commit 027bb97

Please sign in to comment.