Tests compile
pawanjay176 committed Nov 7, 2023
1 parent a0e4025 commit c0b27ad
Showing 11 changed files with 116 additions and 150 deletions.
@@ -755,7 +755,7 @@ mod test {
     use std::ops::AddAssign;
     use store::{HotColdDB, ItemStore, LevelDB, StoreConfig};
     use tempfile::{tempdir, TempDir};
-    use types::{ChainSpec, ExecPayload, MinimalEthSpec};
+    use types::{ChainSpec, ExecPayload, MinimalEthSpec, Sidecar};

     const LOW_VALIDATOR_COUNT: usize = 32;
@@ -920,12 +920,23 @@ mod test {
         }
         info!(log, "done printing kzg commitments");

-        let gossip_verified_blobs = if let Some(blobs) = maybe_blobs {
-            Vec::from(blobs)
+        let gossip_verified_blobs = if let Some((kzg_proofs, blobs)) = maybe_blobs {
+            let sidecars = BlobSidecar::build_sidecar(
+                blobs,
+                &block,
+                block
+                    .message()
+                    .body()
+                    .blob_kzg_commitments()
+                    .expect("should be deneb fork"),
+                kzg_proofs.into(),
+            )
+            .unwrap();
+            Vec::from(sidecars)
                 .into_iter()
-                .map(|signed_blob| {
-                    let subnet = signed_blob.message.index;
-                    validate_blob_sidecar_for_gossip(signed_blob, subnet, &harness.chain)
+                .map(|sidecar| {
+                    let subnet = sidecar.index;
+                    validate_blob_sidecar_for_gossip(sidecar, subnet, &harness.chain)
                         .expect("should validate blob")
                 })
                 .collect()
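
The hunk above shows the pattern the tests are being moved to: blobs and KZG proofs are now produced alongside the block, assembled into unsigned sidecars with `BlobSidecar::build_sidecar`, and each sidecar is gossip-validated on the subnet matching its index. The following is a rough consolidated sketch of that flow, not code from this commit: the helper name `gossip_verify_sidecars`, the `KzgProofs`/`BlobsList` aliases, and the return type are assumptions; only `BlobSidecar::build_sidecar` and `validate_blob_sidecar_for_gossip` appear in the diff itself.

// Hypothetical helper (name, aliases, and signature are assumptions; see note above).
fn gossip_verify_sidecars<T: BeaconChainTypes>(
    chain: &BeaconChain<T>,
    block: &SignedBeaconBlock<T::EthSpec>,
    kzg_proofs: KzgProofs<T::EthSpec>,
    blobs: BlobsList<T::EthSpec>,
) -> Vec<GossipVerifiedBlob<T>> {
    // Build unsigned sidecars from the (proofs, blobs) bundle produced with the block.
    let sidecars = BlobSidecar::build_sidecar(
        blobs,
        block,
        block
            .message()
            .body()
            .blob_kzg_commitments()
            .expect("should be a Deneb block"),
        kzg_proofs.into(),
    )
    .expect("should build sidecars");

    // Gossip-validate each sidecar on the subnet matching its index.
    Vec::from(sidecars)
        .into_iter()
        .map(|sidecar| {
            let subnet = sidecar.index;
            validate_blob_sidecar_for_gossip(sidecar, subnet, chain)
                .expect("should validate blob")
        })
        .collect()
}
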
@@ -1038,7 +1049,7 @@ mod test {
             .expect("kzg should exist");
         let mut kzg_verified_blobs = Vec::new();
         for (blob_index, gossip_blob) in blobs.into_iter().enumerate() {
-            let kzg_verified_blob = verify_kzg_for_blob(gossip_blob.to_blob(), kzg.as_ref())
+            let kzg_verified_blob = verify_kzg_for_blob(gossip_blob.into_inner(), kzg.as_ref())
                 .expect("kzg should verify");
             kzg_verified_blobs.push(kzg_verified_blob);
             let availability = cache
@@ -1066,7 +1077,7 @@ mod test {
         let root = pending_block.import_data.block_root;
         let mut kzg_verified_blobs = vec![];
         for gossip_blob in blobs {
-            let kzg_verified_blob = verify_kzg_for_blob(gossip_blob.to_blob(), kzg.as_ref())
+            let kzg_verified_blob = verify_kzg_for_blob(gossip_blob.into_inner(), kzg.as_ref())
                 .expect("kzg should verify");
             kzg_verified_blobs.push(kzg_verified_blob);
             let availability = cache
@@ -1203,7 +1214,7 @@ mod test {
         let expected_blobs = blobs_0.len();
         let mut kzg_verified_blobs = vec![];
         for (blob_index, gossip_blob) in blobs_0.into_iter().enumerate() {
-            let kzg_verified_blob = verify_kzg_for_blob(gossip_blob.to_blob(), kzg.as_ref())
+            let kzg_verified_blob = verify_kzg_for_blob(gossip_blob.into_inner(), kzg.as_ref())
                 .expect("kzg should verify");
             kzg_verified_blobs.push(kzg_verified_blob);
             let availability = cache
@@ -1289,7 +1300,7 @@ mod test {
         let one_blob = pending_block_blobs
             .pop()
             .expect("should have at least one blob");
-        let kzg_verified_blob = verify_kzg_for_blob(one_blob.to_blob(), kzg.as_ref())
+        let kzg_verified_blob = verify_kzg_for_blob(one_blob.into_inner(), kzg.as_ref())
             .expect("kzg should verify");
         let kzg_verified_blobs = vec![kzg_verified_blob];
         // generate random boolean
@@ -1430,7 +1441,7 @@ mod test {
         let one_blob = pending_block_blobs
             .pop()
             .expect("should have at least one blob");
-        let kzg_verified_blob = verify_kzg_for_blob(one_blob.to_blob(), kzg.as_ref())
+        let kzg_verified_blob = verify_kzg_for_blob(one_blob.into_inner(), kzg.as_ref())
             .expect("kzg should verify");
         let kzg_verified_blobs = vec![kzg_verified_blob];
         // generate random boolean
@@ -1545,7 +1556,7 @@ mod test {
         let additional_blobs = blobs.len();
         let mut kzg_verified_blobs = vec![];
         for (i, gossip_blob) in blobs.into_iter().enumerate() {
-            let kzg_verified_blob = verify_kzg_for_blob(gossip_blob.to_blob(), kzg.as_ref())
+            let kzg_verified_blob = verify_kzg_for_blob(gossip_blob.into_inner(), kzg.as_ref())
                 .expect("kzg should verify");
             kzg_verified_blobs.push(kzg_verified_blob);
             let availability = recovered_cache
1 change: 1 addition & 0 deletions beacon_node/beacon_chain/src/observed_blob_sidecars.rs
@@ -100,6 +100,7 @@ impl<T: EthSpec> ObservedBlobSidecars<T> {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::sync::Arc;
     use types::{BlobSidecar, Hash256, MainnetEthSpec};

     type E = MainnetEthSpec;
95 changes: 41 additions & 54 deletions beacon_node/beacon_chain/tests/block_verification.rs
@@ -1,7 +1,6 @@
-#![cfg(not(debug_assertions))]
+// #![cfg(not(debug_assertions))]

 use beacon_chain::block_verification_types::{AsBlock, ExecutedBlock, RpcBlock};
-use beacon_chain::test_utils::BlobSignatureKey;
 use beacon_chain::{
     test_utils::{AttestationStrategy, BeaconChainHarness, BlockStrategy, EphemeralHarnessType},
     AvailabilityProcessingStatus, BeaconChain, BeaconChainTypes, ExecutionPendingBlock,
@@ -77,10 +76,8 @@ async fn get_chain_segment() -> (Vec<BeaconSnapshot<E>>, Vec<Option<BlobSidecarL
     (segment, segment_blobs)
 }

-async fn get_chain_segment_with_signed_blobs() -> (
-    Vec<BeaconSnapshot<E>>,
-    Vec<Option<VariableList<SignedBlobSidecar<E>, <E as EthSpec>::MaxBlobsPerBlock>>>,
-) {
+async fn get_chain_segment_with_blob_sidecars(
+) -> (Vec<BeaconSnapshot<E>>, Vec<Option<BlobSidecarList<E>>>) {
     let harness = get_harness(VALIDATOR_COUNT);

     harness
@@ -111,27 +108,11 @@ async fn get_chain_segment_with_signed_blobs() -> (
             beacon_block: Arc::new(full_block),
             beacon_state: snapshot.beacon_state,
         });
-        let signed_blobs = harness
+        let blob_sidecars = harness
             .chain
             .get_blobs(&snapshot.beacon_block_root)
-            .unwrap()
-            .into_iter()
-            .map(|blob| {
-                let block_root = blob.block_root;
-                let blob_index = blob.index;
-                SignedBlobSidecar {
-                    message: blob,
-                    signature: harness
-                        .blob_signature_cache
-                        .read()
-                        .get(&BlobSignatureKey::new(block_root, blob_index))
-                        .unwrap()
-                        .clone(),
-                    _phantom: PhantomData,
-                }
-            })
-            .collect::<Vec<_>>();
-        segment_blobs.push(Some(VariableList::from(signed_blobs)))
+            .unwrap();
+        segment_blobs.push(Some(VariableList::from(blob_sidecars)))
     }
     (segment, segment_blobs)
 }
@@ -214,34 +195,28 @@ fn update_parent_roots(
             let (mut block, signature) = child.beacon_block.as_ref().clone().deconstruct();
             *block.parent_root_mut() = root;
             let new_child = Arc::new(SignedBeaconBlock::from_block(block, signature));
-            let new_child_root = new_child.canonical_root();
-            child.beacon_block = new_child;
             if let Some(blobs) = child_blobs {
-                update_blob_roots(new_child_root, blobs);
+                update_blob_signed_header(&new_child, blobs);
             }
+            child.beacon_block = new_child;
         }
     }
 }

-fn update_blob_roots<E: EthSpec>(block_root: Hash256, blobs: &mut BlobSidecarList<E>) {
+fn update_blob_signed_header<E: EthSpec>(
+    signed_block: &SignedBeaconBlock<E>,
+    blobs: &mut BlobSidecarList<E>,
+) {
     for old_blob_sidecar in blobs.iter_mut() {
-        let index = old_blob_sidecar.index;
-        let slot = old_blob_sidecar.slot;
-        let block_parent_root = old_blob_sidecar.block_parent_root;
-        let proposer_index = old_blob_sidecar.proposer_index;
-        let blob = old_blob_sidecar.blob.clone();
-        let kzg_commitment = old_blob_sidecar.kzg_commitment;
-        let kzg_proof = old_blob_sidecar.kzg_proof;
-
         let new_blob = Arc::new(BlobSidecar::<E> {
-            block_root,
-            index,
-            slot,
-            block_parent_root,
-            proposer_index,
-            blob,
-            kzg_commitment,
-            kzg_proof,
+            index: old_blob_sidecar.index,
+            blob: old_blob_sidecar.blob.clone(),
+            kzg_commitment: old_blob_sidecar.kzg_commitment,
+            kzg_proof: old_blob_sidecar.kzg_proof,
+            signed_block_header: signed_block.signed_block_header(),
+            kzg_commitment_inclusion_proof: signed_block
+                .kzg_commitment_merkle_proof(old_blob_sidecar.index as usize)
+                .unwrap(),
         });
         *old_blob_sidecar = new_blob;
     }
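
The helper above captures the core of the migration: a blob sidecar no longer carries its own proposer signature; it instead embeds the block's signed header plus a Merkle proof that its KZG commitment is included in the block body. The following is a rough sketch of the sidecar shape implied by that construction; the field types are assumptions inferred from the call sites in this diff, not copied from the actual `types::BlobSidecar` definition.

// Sketch only: field types are inferred, not the real definition in the `types` crate.
pub struct BlobSidecar<E: EthSpec> {
    pub index: u64,
    pub blob: Blob<E>,
    pub kzg_commitment: KzgCommitment,
    pub kzg_proof: KzgProof,
    // Replaces the per-sidecar proposer signature of the removed SignedBlobSidecar.
    pub signed_block_header: SignedBeaconBlockHeader,
    // Proves that the commitment at `index` is part of the signed block's body.
    pub kzg_commitment_inclusion_proof: FixedVector<Hash256, E::KzgCommitmentInclusionProofDepth>,
}
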
@@ -879,7 +854,7 @@ fn unwrap_err<T, E>(result: Result<T, E>) -> E {
 #[tokio::test]
 async fn block_gossip_verification() {
     let harness = get_harness(VALIDATOR_COUNT);
-    let (chain_segment, chain_segment_blobs) = get_chain_segment_with_signed_blobs().await;
+    let (chain_segment, chain_segment_blobs) = get_chain_segment_with_blob_sidecars().await;

     let block_index = CHAIN_SEGMENT_LENGTH - 2;

@@ -909,12 +884,12 @@ async fn block_gossip_verification() {
             )
             .await
             .expect("should import valid gossip verified block");
-        if let Some(blobs) = blobs_opt {
-            for blob in blobs {
-                let blob_index = blob.message.index;
+        if let Some(blob_sidecars) = blobs_opt {
+            for blob_sidecar in blob_sidecars {
+                let blob_index = blob_sidecar.index;
                 let gossip_verified = harness
                     .chain
-                    .verify_blob_sidecar_for_gossip(blob.clone(), blob_index)
+                    .verify_blob_sidecar_for_gossip(blob_sidecar.clone(), blob_index)
                     .expect("should obtain gossip verified blob");

                 harness
@@ -1178,12 +1153,24 @@ async fn verify_block_for_gossip_slashing_detection() {
         .await
         .unwrap();

-    if let Some(blobs) = blobs1 {
-        for blob in blobs {
-            let blob_index = blob.message.index;
+    if let Some((kzg_proofs, blobs)) = blobs1 {
+        let sidecars = BlobSidecar::build_sidecar(
+            blobs,
+            verified_block.block(),
+            verified_block
+                .block()
+                .message()
+                .body()
+                .blob_kzg_commitments()
+                .unwrap(),
+            kzg_proofs.into(),
+        )
+        .unwrap();
+        for sidecar in sidecars {
+            let blob_index = sidecar.index;
             let verified_blob = harness
                 .chain
-                .verify_blob_sidecar_for_gossip(blob, blob_index)
+                .verify_blob_sidecar_for_gossip(sidecar, blob_index)
                 .unwrap();
             harness
                 .chain
21 changes: 8 additions & 13 deletions beacon_node/beacon_chain/tests/events.rs
@@ -1,13 +1,12 @@
 use beacon_chain::blob_verification::GossipVerifiedBlob;
 use beacon_chain::test_utils::BeaconChainHarness;
-use bls::Signature;
 use eth2::types::{EventKind, SseBlobSidecar};
 use rand::rngs::StdRng;
 use rand::SeedableRng;
-use std::marker::PhantomData;
+use std::ops::Deref;
 use std::sync::Arc;
 use types::blob_sidecar::FixedBlobSidecarList;
-use types::{BlobSidecar, EthSpec, ForkName, MinimalEthSpec, SignedBlobSidecar};
+use types::{BlobSidecar, EthSpec, ForkName, MinimalEthSpec};

 type E = MinimalEthSpec;

@@ -29,15 +28,11 @@ async fn blob_sidecar_event_on_process_gossip_blob() {
     // build and process a gossip verified blob
     let kzg = harness.chain.kzg.as_ref().unwrap();
     let mut rng = StdRng::seed_from_u64(0xDEADBEEF0BAD5EEDu64);
-    let signed_sidecar = SignedBlobSidecar {
-        message: BlobSidecar::random_valid(&mut rng, kzg)
-            .map(Arc::new)
-            .unwrap(),
-        signature: Signature::empty(),
-        _phantom: PhantomData,
-    };
-    let gossip_verified_blob = GossipVerifiedBlob::__assumed_valid(signed_sidecar);
-    let expected_sse_blobs = SseBlobSidecar::from_blob_sidecar(gossip_verified_blob.as_blob());
+    let sidecar = BlobSidecar::random_valid(&mut rng, kzg)
+        .map(Arc::new)
+        .unwrap();
+    let gossip_verified_blob = GossipVerifiedBlob::__assumed_valid(sidecar);
+    let expected_sse_blobs = SseBlobSidecar::from_blob_sidecar(gossip_verified_blob.deref());

     let _ = harness
         .chain
@@ -83,7 +78,7 @@ async fn blob_sidecar_event_on_process_rpc_blobs() {

     let _ = harness
         .chain
-        .process_rpc_blobs(blob_1.slot, blob_1.block_root, blobs)
+        .process_rpc_blobs(blob_1.slot(), blob_1.block_root(), blobs)
         .await
         .unwrap();

4 changes: 2 additions & 2 deletions beacon_node/http_api/src/publish_blocks.rs
@@ -86,8 +86,8 @@ pub async fn publish_block<T: BeaconChainTypes, B: IntoGossipVerifiedBlockConten
             }
             SignedBeaconBlock::Deneb(_) => {
                 let mut pubsub_messages = vec![PubsubMessage::BeaconBlock(block.clone())];
-                if let Some(signed_blobs) = blobs_opt {
-                    for (blob_index, blob) in signed_blobs.into_iter().enumerate() {
+                if let Some(blob_sidecars) = blobs_opt {
+                    for (blob_index, blob) in blob_sidecars.into_iter().enumerate() {
                         pubsub_messages.push(PubsubMessage::BlobSidecar(Box::new((
                             blob_index as u64,
                             blob,
20 changes: 4 additions & 16 deletions beacon_node/http_api/tests/broadcast_validation_tests.rs
@@ -10,10 +10,7 @@ use http_api::test_utils::InteractiveTester;
 use http_api::{publish_blinded_block, publish_block, reconstruct_block, ProvenancedBlock};
 use std::sync::Arc;
 use tree_hash::TreeHash;
-use types::{
-    BlindedBlobSidecar, BlindedPayload, BlobSidecar, FullPayload, Hash256, MainnetEthSpec,
-    SignedSidecarList, Slot,
-};
+use types::{BlindedPayload, FullPayload, Hash256, MainnetEthSpec, Slot};
 use warp::Rejection;
 use warp_utils::reject::CustomBadRequest;

@@ -1404,16 +1401,7 @@ pub async fn blinded_equivocation_full_pass() {
 fn into_signed_blinded_block_contents(
     block_contents_tuple: SignedBlockContentsTuple<E, FullPayload<E>>,
 ) -> SignedBlockContents<E, BlindedPayload<E>> {
-    let (block, maybe_blobs) = block_contents_tuple;
-    SignedBlockContents::new(block.into(), maybe_blobs.map(into_blinded_blob_sidecars))
-}
-
-fn into_blinded_blob_sidecars(
-    blobs: SignedSidecarList<E, BlobSidecar<E>>,
-) -> SignedSidecarList<E, BlindedBlobSidecar> {
-    blobs
-        .into_iter()
-        .map(|blob| blob.into())
-        .collect::<Vec<_>>()
-        .into()
+    let (block, blob_items) = block_contents_tuple;
+    // TODO(pawan): recheck if we want to keep the BlobsRootList for the blinded variant
+    SignedBlockContents::new(block.into(), None)
 }
6 changes: 1 addition & 5 deletions beacon_node/http_api/tests/interactive_tests.rs
@@ -641,13 +641,9 @@ pub async fn proposer_boost_re_org_test(
         assert_eq!(block_c.parent_root(), block_b_root);
     }

-    // Sign blobs.
-    let block_c_signed_blobs =
-        block_c_blobs.map(|blobs| harness.sign_blobs(blobs, &state_b, proposer_index));
-
     // Applying block C should cause it to become head regardless (re-org or continuation).
     let block_root_c = harness
-        .process_block_result((block_c.clone(), block_c_signed_blobs))
+        .process_block_result((block_c.clone(), block_c_blobs))
         .await
         .unwrap()
         .into();