-
Notifications
You must be signed in to change notification settings - Fork 931
Fix CGC backfill race condition #8267
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
mergify
merged 4 commits into
sigp:release-v8.0
from
eserilev:cgc-backfill-race-condition
Nov 3, 2025
+103
−91
Merged
Changes from all commits
Commits
Show all changes
4 commits
Select commit
Hold shift + click to select a range
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -3182,13 +3182,16 @@ async fn weak_subjectivity_sync_test( | |
| assert_eq!(store.get_anchor_info().state_upper_limit, Slot::new(0)); | ||
| } | ||
|
|
||
| // This test prunes data columns from epoch 0 and then tries to re-import them via | ||
| // the same code paths that custody backfill sync imports data columns | ||
| #[tokio::test] | ||
| async fn test_import_historical_data_columns_batch() { | ||
| let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); | ||
| let db_path = tempdir().unwrap(); | ||
| let store = get_store_generic(&db_path, StoreConfig::default(), spec); | ||
| let start_slot = Epoch::new(0).start_slot(E::slots_per_epoch()) + 1; | ||
| let end_slot = Epoch::new(0).end_slot(E::slots_per_epoch()); | ||
| let cgc = 128; | ||
|
|
||
| let harness = get_harness_import_all_data_columns(store.clone(), LOW_VALIDATOR_COUNT); | ||
|
|
||
|
|
@@ -3208,6 +3211,7 @@ async fn test_import_historical_data_columns_batch() { | |
|
|
||
| let mut data_columns_list = vec![]; | ||
|
|
||
| // Get all data columns for epoch 0 | ||
| for block in block_root_iter { | ||
| let (block_root, _) = block.unwrap(); | ||
| let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); | ||
|
|
@@ -3227,6 +3231,7 @@ async fn test_import_historical_data_columns_batch() { | |
|
|
||
| harness.advance_slot(); | ||
|
|
||
| // Prune data columns | ||
| harness | ||
| .chain | ||
| .store | ||
|
|
@@ -3238,21 +3243,25 @@ async fn test_import_historical_data_columns_batch() { | |
| .forwards_iter_block_roots_until(start_slot, end_slot) | ||
| .unwrap(); | ||
|
|
||
| // Assert that data columns no longer exist for epoch 0 | ||
| for block in block_root_iter { | ||
| let (block_root, _) = block.unwrap(); | ||
| let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); | ||
| assert!(data_columns.is_none()) | ||
| } | ||
|
|
||
| // Re-import deleted data columns | ||
| harness | ||
| .chain | ||
| .import_historical_data_column_batch(Epoch::new(0), data_columns_list) | ||
| .import_historical_data_column_batch(Epoch::new(0), data_columns_list, cgc) | ||
| .unwrap(); | ||
|
|
||
| let block_root_iter = harness | ||
| .chain | ||
| .forwards_iter_block_roots_until(start_slot, end_slot) | ||
| .unwrap(); | ||
|
|
||
| // Assert that data columns now exist for epoch 0 | ||
jimmygchen marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| for block in block_root_iter { | ||
| let (block_root, _) = block.unwrap(); | ||
| let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); | ||
|
|
@@ -3261,13 +3270,15 @@ async fn test_import_historical_data_columns_batch() { | |
| } | ||
|
|
||
| // This should verify that a data column sidecar containing mismatched block roots should fail to be imported. | ||
| // This also covers any test cases related to data columns with incorrect/invalid/mismatched block roots. | ||
| #[tokio::test] | ||
| async fn test_import_historical_data_columns_batch_mismatched_block_root() { | ||
| let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); | ||
| let db_path = tempdir().unwrap(); | ||
| let store = get_store_generic(&db_path, StoreConfig::default(), spec); | ||
| let start_slot = Slot::new(1); | ||
| let end_slot = Slot::new(E::slots_per_epoch() * 2 - 1); | ||
| let cgc = 128; | ||
jimmygchen marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
| let harness = get_harness_import_all_data_columns(store.clone(), LOW_VALIDATOR_COUNT); | ||
|
|
||
|
|
@@ -3287,6 +3298,8 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() { | |
|
|
||
| let mut data_columns_list = vec![]; | ||
|
|
||
| // Get all data columns from start_slot to end_slot | ||
| // and mutate the data columns with an invalid block root | ||
| for block in block_root_iter { | ||
| let (block_root, _) = block.unwrap(); | ||
| let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); | ||
|
|
@@ -3312,6 +3325,7 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() { | |
|
|
||
| harness.advance_slot(); | ||
|
|
||
| // Prune blobs | ||
| harness | ||
| .chain | ||
| .store | ||
|
|
@@ -3323,17 +3337,20 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() { | |
| .forwards_iter_block_roots_until(start_slot, end_slot) | ||
| .unwrap(); | ||
|
|
||
| // Assert there are no columns between start_slot and end_slot | ||
| for block in block_root_iter { | ||
| let (block_root, _) = block.unwrap(); | ||
| let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); | ||
| assert!(data_columns.is_none()) | ||
| } | ||
|
|
||
| // Attempt to import data columns with invalid block roots and expect a failure | ||
| let error = harness | ||
| .chain | ||
| .import_historical_data_column_batch( | ||
| start_slot.epoch(E::slots_per_epoch()), | ||
| data_columns_list, | ||
| cgc, | ||
| ) | ||
| .unwrap_err(); | ||
|
|
||
|
|
@@ -3343,84 +3360,6 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() { | |
| )); | ||
| } | ||
|
|
||
| // This should verify that a data column sidecar associated to a block root that doesn't exist in the store cannot | ||
| // be imported. | ||
| #[tokio::test] | ||
| async fn test_import_historical_data_columns_batch_no_block_found() { | ||
|
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. "this was a redundant test case" |
||
| let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); | ||
| let db_path = tempdir().unwrap(); | ||
| let store = get_store_generic(&db_path, StoreConfig::default(), spec); | ||
| let start_slot = Slot::new(1); | ||
| let end_slot = Slot::new(E::slots_per_epoch() * 2 - 1); | ||
|
|
||
| let harness = get_harness_import_all_data_columns(store.clone(), LOW_VALIDATOR_COUNT); | ||
|
|
||
| harness | ||
| .extend_chain( | ||
| (E::slots_per_epoch() * 2) as usize, | ||
| BlockStrategy::OnCanonicalHead, | ||
| AttestationStrategy::AllValidators, | ||
| ) | ||
| .await; | ||
| harness.advance_slot(); | ||
|
|
||
| let block_root_iter = harness | ||
| .chain | ||
| .forwards_iter_block_roots_until(start_slot, end_slot) | ||
| .unwrap(); | ||
|
|
||
| let mut data_columns_list = vec![]; | ||
|
|
||
| for block in block_root_iter { | ||
| let (block_root, _) = block.unwrap(); | ||
| let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); | ||
| assert!(data_columns.is_some()); | ||
|
|
||
| for data_column in data_columns.unwrap() { | ||
| let mut data_column = (*data_column).clone(); | ||
| data_column.signed_block_header.message.body_root = Hash256::ZERO; | ||
| data_columns_list.push(Arc::new(data_column)); | ||
| } | ||
| } | ||
|
|
||
| harness | ||
| .extend_chain( | ||
| (E::slots_per_epoch() * 4) as usize, | ||
| BlockStrategy::OnCanonicalHead, | ||
| AttestationStrategy::AllValidators, | ||
| ) | ||
| .await; | ||
|
|
||
| harness.advance_slot(); | ||
|
|
||
| harness | ||
| .chain | ||
| .store | ||
| .try_prune_blobs(true, Epoch::new(2)) | ||
| .unwrap(); | ||
|
|
||
| let block_root_iter = harness | ||
| .chain | ||
| .forwards_iter_block_roots_until(start_slot, end_slot) | ||
| .unwrap(); | ||
|
|
||
| for block in block_root_iter { | ||
| let (block_root, _) = block.unwrap(); | ||
| let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); | ||
| assert!(data_columns.is_none()) | ||
| } | ||
|
|
||
| let error = harness | ||
| .chain | ||
| .import_historical_data_column_batch(Epoch::new(0), data_columns_list) | ||
| .unwrap_err(); | ||
|
|
||
| assert!(matches!( | ||
| error, | ||
| HistoricalDataColumnError::NoBlockFound { .. } | ||
| )); | ||
| } | ||
|
|
||
| /// Test that blocks and attestations that refer to states around an unaligned split state are | ||
| /// processed correctly. | ||
| #[tokio::test] | ||
|
|
||
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
nit: could use either
`E::number_of_columns` or `harness.chain.data_availability_checker.custody_context().custody_group_count_at_head(&spec)` instead.