From 3a2c7d5012f518430adec1b2e0b38fdf076047bc Mon Sep 17 00:00:00 2001
From: Eitan Seri-Levi
Date: Wed, 5 Feb 2025 13:37:04 +0200
Subject: [PATCH 01/11] Add builder ssz flow

---
 Cargo.lock | 29 ++-
 Cargo.toml | 4 +
 crates/common/Cargo.toml | 4 +
 crates/common/src/pbs/types/beacon_block.rs | 7 +-
 .../src/pbs/types/blinded_block_body.rs | 35 ++--
 crates/common/src/pbs/types/blobs_bundle.rs | 3 +-
 .../common/src/pbs/types/execution_payload.rs | 7 +-
 crates/common/src/pbs/types/get_header.rs | 5 +-
 crates/common/src/pbs/types/kzg.rs | 39 ++++
 crates/common/src/pbs/types/utils.rs | 13 ++
 crates/common/src/utils.rs | 166 +++++++++++++++++-
 crates/pbs/Cargo.toml | 1 +
 crates/pbs/src/routes/get_header.rs | 33 +++-
 crates/pbs/src/routes/submit_block.rs | 37 +++-
 14 files changed, 339 insertions(+), 44 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 9a9a6389..0cebdf56 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -225,7 +225,7 @@ dependencies = [
  "c-kzg",
  "derive_more",
  "ethereum_ssz 0.8.0",
- "ethereum_ssz_derive",
+ "ethereum_ssz_derive 0.8.0",
  "once_cell",
  "serde",
  "sha2 0.10.8",
@@ -479,7 +479,7 @@ dependencies = [
  "alloy-rpc-types-engine",
  "alloy-serde",
  "ethereum_ssz 0.8.0",
- "ethereum_ssz_derive",
+ "ethereum_ssz_derive 0.8.0",
  "serde",
  "serde_with",
  "thiserror 2.0.6",
@@ -498,7 +498,7 @@ dependencies = [
  "alloy-serde",
  "derive_more",
  "ethereum_ssz 0.8.0",
- "ethereum_ssz_derive",
+ "ethereum_ssz_derive 0.8.0",
  "serde",
  "strum",
 ]
@@ -1307,13 +1307,17 @@ dependencies = [
  "base64 0.22.1",
  "bimap",
  "blst",
+ "bytes",
  "cipher 0.4.4",
  "ctr 0.9.2",
  "derive_more",
  "eth2_keystore",
  "ethereum_serde_utils 0.7.0",
+ "ethereum_ssz 0.7.1",
+ "ethereum_ssz_derive 0.7.1",
  "eyre",
  "k256",
+ "mediatype",
  "pbkdf2 0.12.2",
  "rand",
  "reqwest",
@@ -1358,6 +1362,7 @@ dependencies = [
  "cb-common",
  "cb-metrics",
  "dashmap 5.5.3",
+ "ethereum_ssz 0.7.1",
  "eyre",
  "futures",
  "lazy_static",
@@ -2135,6 +2140,18 @@ dependencies = [
  "typenum",
 ]
 
+[[package]]
+name = "ethereum_ssz_derive"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3deae99c8e74829a00ba7a92d49055732b3c1f093f2ccfa3cbc621679b6fa91"
+dependencies = [
+ "darling",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.90",
+]
+
 [[package]]
 name = "ethereum_ssz_derive"
 version = "0.8.0"
@@ -2845,6 +2862,12 @@ version = "0.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
 
+[[package]]
+name = "mediatype"
+version = "0.19.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8878cd8d1b3c8c8ae4b2ba0a36652b7cf192f618a599a7fbdfa25cffd4ea72dd"
+
 [[package]]
 name = "memchr"
 version = "2.7.2"
diff --git a/Cargo.toml b/Cargo.toml
index 1bf3de64..b60d4f30 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -39,6 +39,8 @@ alloy = { version = "0.8.0", features = [
 ] }
 ssz_types = "0.8"
 ethereum_serde_utils = "0.7.0"
+ethereum_ssz = "0.7"
+ethereum_ssz_derive = "0.7"
 
 # networking
 axum = { version = "0.8.1", features = ["macros"] }
@@ -101,3 +103,5 @@ derive_more = { version = "1.0.0", features = [
     "deref",
     "display",
 ] }
+mediatype = "0.19.13"
+bytes = "1.6"
diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml
index 1ff1c979..a77dd25c 100644
--- a/crates/common/Cargo.toml
+++ b/crates/common/Cargo.toml
@@ -9,6 +9,8 @@ publish = false
 # ethereum
 alloy.workspace = true
 ssz_types.workspace = true
+ethereum_ssz.workspace = true
+ethereum_ssz_derive.workspace = true
 ethereum_serde_utils.workspace = true
 
 #
networking @@ -49,6 +51,8 @@ url.workspace = true rand.workspace = true bimap.workspace = true derive_more.workspace = true +mediatype.workspace = true +bytes.workspace = true unicode-normalization.workspace = true base64.workspace = true diff --git a/crates/common/src/pbs/types/beacon_block.rs b/crates/common/src/pbs/types/beacon_block.rs index e17f1f8a..289637bf 100644 --- a/crates/common/src/pbs/types/beacon_block.rs +++ b/crates/common/src/pbs/types/beacon_block.rs @@ -1,12 +1,13 @@ use alloy::{primitives::B256, rpc::types::beacon::BlsSignature}; use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; use super::{ blinded_block_body::BlindedBeaconBlockBody, blobs_bundle::BlobsBundle, execution_payload::ExecutionPayload, spec::DenebSpec, utils::VersionedResponse, }; -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] /// Sent to relays in submit_block pub struct SignedBlindedBeaconBlock { pub message: BlindedBeaconBlock, @@ -19,7 +20,7 @@ impl SignedBlindedBeaconBlock { } } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct BlindedBeaconBlock { #[serde(with = "serde_utils::quoted_u64")] pub slot: u64, @@ -33,7 +34,7 @@ pub struct BlindedBeaconBlock { /// Returned by relay in submit_block pub type SubmitBlindedBlockResponse = VersionedResponse; -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode)] pub struct PayloadAndBlobs { pub execution_payload: ExecutionPayload, pub blobs_bundle: Option>, diff --git a/crates/common/src/pbs/types/blinded_block_body.rs b/crates/common/src/pbs/types/blinded_block_body.rs index ded4f215..20785954 100644 --- a/crates/common/src/pbs/types/blinded_block_body.rs +++ b/crates/common/src/pbs/types/blinded_block_body.rs @@ -3,6 +3,7 @@ use alloy::{ rpc::types::beacon::{BlsPublicKey, BlsSignature}, }; use serde::{Deserialize, Serialize}; +use ssz_derive::Decode; use ssz_types::{typenum, BitList, BitVector, FixedVector, VariableList}; use super::{ @@ -10,7 +11,7 @@ use super::{ }; use crate::utils::as_str; -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct BlindedBeaconBlockBody { pub randao_reveal: BlsSignature, pub eth1_data: Eth1Data, @@ -27,7 +28,7 @@ pub struct BlindedBeaconBlockBody { pub blob_kzg_commitments: KzgCommitments, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct Eth1Data { pub deposit_root: B256, #[serde(with = "serde_utils::quoted_u64")] @@ -35,7 +36,7 @@ pub struct Eth1Data { pub block_hash: B256, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct BeaconBlockHeader { #[serde(with = "serde_utils::quoted_u64")] pub slot: u64, @@ -46,13 +47,13 @@ pub struct BeaconBlockHeader { pub body_root: B256, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct SignedBeaconBlockHeader { pub message: BeaconBlockHeader, pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct BlsToExecutionChange { #[serde(with = "as_str")] pub validator_index: u64, @@ -60,25 +61,25 @@ pub struct 
BlsToExecutionChange { pub to_execution_address: Address, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct SignedBlsToExecutionChange { pub message: BlsToExecutionChange, pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct ProposerSlashing { pub signed_header_1: SignedBeaconBlockHeader, pub signed_header_2: SignedBeaconBlockHeader, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct AttesterSlashing { pub attestation_1: IndexedAttestation, pub attestation_2: IndexedAttestation, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] #[serde(bound = "T: EthSpec")] pub struct IndexedAttestation { /// Lists validator registry indices, not committee indices. @@ -88,7 +89,7 @@ pub struct IndexedAttestation { pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct AttestationData { #[serde(with = "serde_utils::quoted_u64")] pub slot: u64, @@ -101,14 +102,14 @@ pub struct AttestationData { pub target: Checkpoint, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct Checkpoint { #[serde(with = "serde_utils::quoted_u64")] pub epoch: u64, pub root: B256, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, Decode)] #[serde(bound = "T: EthSpec")] pub struct Attestation { pub aggregation_bits: BitList, @@ -116,13 +117,13 @@ pub struct Attestation { pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct Deposit { pub proof: FixedVector, // put this in EthSpec? pub data: DepositData, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct DepositData { pub pubkey: BlsPublicKey, pub withdrawal_credentials: B256, @@ -131,13 +132,13 @@ pub struct DepositData { pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct SignedVoluntaryExit { pub message: VoluntaryExit, pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] pub struct VoluntaryExit { /// Earliest epoch when voluntary exit can be processed. 
#[serde(with = "serde_utils::quoted_u64")] @@ -146,7 +147,7 @@ pub struct VoluntaryExit { pub validator_index: u64, } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] #[serde(bound = "T: EthSpec")] pub struct SyncAggregate { pub sync_committee_bits: BitVector, diff --git a/crates/common/src/pbs/types/blobs_bundle.rs b/crates/common/src/pbs/types/blobs_bundle.rs index 778679b2..4f95beda 100644 --- a/crates/common/src/pbs/types/blobs_bundle.rs +++ b/crates/common/src/pbs/types/blobs_bundle.rs @@ -1,4 +1,5 @@ use serde::{Deserialize, Serialize}; +use ssz_derive::Encode; use ssz_types::{FixedVector, VariableList}; use super::{ @@ -6,7 +7,7 @@ use super::{ spec::EthSpec, }; -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode)] #[serde(bound = "T: EthSpec")] pub struct BlobsBundle { pub commitments: KzgCommitments, diff --git a/crates/common/src/pbs/types/execution_payload.rs b/crates/common/src/pbs/types/execution_payload.rs index e593f5f1..6116fac5 100644 --- a/crates/common/src/pbs/types/execution_payload.rs +++ b/crates/common/src/pbs/types/execution_payload.rs @@ -1,5 +1,6 @@ use alloy::primitives::{b256, Address, B256, U256}; use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; use ssz_types::{FixedVector, VariableList}; use tree_hash_derive::TreeHash; @@ -9,7 +10,7 @@ use crate::utils::as_str; pub const EMPTY_TX_ROOT_HASH: B256 = b256!("7ffe241ea60187fdb0187bfa22de35d1f9bed7ab061d9401fd47e34a54fbede1"); -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode)] pub struct ExecutionPayload { pub parent_hash: B256, pub fee_recipient: Address, @@ -46,7 +47,7 @@ pub type Transactions = VariableList< >; pub type Transaction = VariableList; -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode)] pub struct Withdrawal { #[serde(with = "serde_utils::quoted_u64")] pub index: u64, @@ -57,7 +58,7 @@ pub struct Withdrawal { pub amount: u64, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, TreeHash)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, TreeHash, Encode, Decode)] pub struct ExecutionPayloadHeader { pub parent_hash: B256, pub fee_recipient: Address, diff --git a/crates/common/src/pbs/types/get_header.rs b/crates/common/src/pbs/types/get_header.rs index ebffa946..55b893db 100644 --- a/crates/common/src/pbs/types/get_header.rs +++ b/crates/common/src/pbs/types/get_header.rs @@ -3,6 +3,7 @@ use alloy::{ rpc::types::beacon::{BlsPublicKey, BlsSignature}, }; use serde::{Deserialize, Serialize}; +use ssz_derive::Encode; use tree_hash_derive::TreeHash; use super::{ @@ -37,13 +38,13 @@ impl GetHeaderResponse { } } -#[derive(Debug, Default, Clone, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode)] pub struct SignedExecutionPayloadHeader { pub message: ExecutionPayloadHeaderMessage, pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, TreeHash)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, TreeHash, Encode)] pub struct ExecutionPayloadHeaderMessage { pub header: ExecutionPayloadHeader, pub blob_kzg_commitments: KzgCommitments, diff --git a/crates/common/src/pbs/types/kzg.rs b/crates/common/src/pbs/types/kzg.rs index e5b3fe6f..165302ea 100644 --- a/crates/common/src/pbs/types/kzg.rs +++ 
b/crates/common/src/pbs/types/kzg.rs @@ -4,6 +4,7 @@ use std::{ }; use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use ssz::{Encode, Decode}; use ssz_types::VariableList; use tree_hash::{PackedEncoding, TreeHash}; @@ -21,6 +22,30 @@ impl From for [u8; 48] { } } +impl Decode for KzgCommitment { + fn is_ssz_fixed_len() -> bool { + <[u8; BYTES_PER_COMMITMENT] as ssz::Decode>::is_ssz_fixed_len() + } + + fn from_ssz_bytes(bytes: &[u8]) -> Result { + <[u8; BYTES_PER_COMMITMENT]>::from_ssz_bytes(bytes).and_then(|o| Ok(Self(o))) + } +} + +impl Encode for KzgCommitment { + fn is_ssz_fixed_len() -> bool { + <[u8; BYTES_PER_COMMITMENT] as ssz::Encode>::is_ssz_fixed_len() + } + + fn ssz_append(&self, buf: &mut Vec) { + self.0.ssz_append(buf) + } + + fn ssz_bytes_len(&self) -> usize { + self.0.ssz_bytes_len() + } +} + impl TreeHash for KzgCommitment { fn tree_hash_type() -> tree_hash::TreeHashType { <[u8; BYTES_PER_COMMITMENT] as TreeHash>::tree_hash_type() @@ -118,6 +143,20 @@ impl From<[u8; BYTES_PER_PROOF]> for KzgProof { } } +impl Encode for KzgProof { + fn is_ssz_fixed_len() -> bool { + <[u8; BYTES_PER_PROOF] as ssz::Encode>::is_ssz_fixed_len() + } + + fn ssz_append(&self, buf: &mut Vec) { + self.0.ssz_append(buf) + } + + fn ssz_bytes_len(&self) -> usize { + self.0.ssz_bytes_len() + } +} + impl Serialize for KzgProof { fn serialize(&self, serializer: S) -> Result where diff --git a/crates/common/src/pbs/types/utils.rs b/crates/common/src/pbs/types/utils.rs index 2aeb1793..1fc7e4c6 100644 --- a/crates/common/src/pbs/types/utils.rs +++ b/crates/common/src/pbs/types/utils.rs @@ -1,3 +1,5 @@ +use std::fmt; + use serde::{Deserialize, Serialize}; pub mod quoted_variable_list_u64 { @@ -40,4 +42,15 @@ pub enum Version { #[serde(rename = "deneb")] #[default] Deneb, + #[serde(rename = "electra")] + Electra, +} + +impl fmt::Display for Version { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Version::Deneb => write!(f, "deneb"), + Version::Electra => write!(f, "electra"), + } + } } diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 42cb0427..378e5aa3 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -1,16 +1,23 @@ use std::{ + fmt, net::Ipv4Addr, + str::FromStr, time::{SystemTime, UNIX_EPOCH}, }; - +use bytes::Bytes; +use axum::{ + extract::{FromRequest, Request}, + response::{IntoResponse, Response}, + http::HeaderValue +}; use alloy::{ primitives::U256, rpc::types::beacon::{BlsPublicKey, BlsSignature}, }; -use axum::http::HeaderValue; use blst::min_pk::{PublicKey, Signature}; +use mediatype::{names, MediaType, MediaTypeList}; use rand::{distributions::Alphanumeric, Rng}; -use reqwest::header::HeaderMap; +use reqwest::{header::{HeaderMap, ACCEPT, CONTENT_TYPE}, StatusCode}; use serde::{de::DeserializeOwned, Serialize}; use serde_json::Value; use tracing::Level; @@ -24,6 +31,7 @@ use crate::{ }; const MILLIS_PER_SECOND: u64 = 1_000; +pub const CONSENSUS_VERSION_HEADER: &str = "Eth-Consensus-Version"; pub fn timestamp_of_slot_start_sec(slot: u64, chain: Chain) -> u64 { chain.genesis_time_sec() + slot * chain.slot_time_sec() @@ -273,6 +281,158 @@ pub fn get_user_agent_with_version(req_headers: &HeaderMap) -> eyre::Result Accept { + Accept::from_str( + req_headers.get(ACCEPT).and_then(|value| value.to_str().ok()).unwrap_or("application/json"), + ) + .unwrap_or(Accept::Json) +} + +/// Parse CONTENT TYPE header, default to JSON if missing or mal-formatted +pub fn get_content_type_header(req_headers: &HeaderMap) -> 
ContentType { + ContentType::from_str( + req_headers.get(CONTENT_TYPE).and_then(|value| value.to_str().ok()).unwrap_or("application/json"), + ) + .unwrap_or(ContentType::Json) +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum ContentType { + Json, + Ssz +} + +impl std::fmt::Display for ContentType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ContentType::Json => write!(f, "application/json"), + ContentType::Ssz => write!(f, "application/octet-stream"), + } + } +} + +impl FromStr for ContentType { + type Err = String; + fn from_str(value: &str) -> Result { + match value { + "application/json" => Ok(ContentType::Json), + "application/octet-stream" => Ok(ContentType::Ssz), + _ => Err(format!("unknown content type: {}", value)), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum Accept { + Json, + Ssz, + Any, +} + +impl fmt::Display for Accept { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Accept::Ssz => write!(f, "application/octet-stream"), + Accept::Json => write!(f, "application/json"), + Accept::Any => write!(f, "*/*"), + } + } +} + +impl FromStr for Accept { + type Err = String; + + fn from_str(s: &str) -> Result { + let media_type_list = MediaTypeList::new(s); + + // [q-factor weighting]: https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2 + // find the highest q-factor supported accept type + let mut highest_q = 0_u16; + let mut accept_type = None; + + const APPLICATION: &str = names::APPLICATION.as_str(); + const OCTET_STREAM: &str = names::OCTET_STREAM.as_str(); + const JSON: &str = names::JSON.as_str(); + const STAR: &str = names::_STAR.as_str(); + const Q: &str = names::Q.as_str(); + + media_type_list.into_iter().for_each(|item| { + if let Ok(MediaType { ty, subty, suffix: _, params }) = item { + let q_accept = match (ty.as_str(), subty.as_str()) { + (APPLICATION, OCTET_STREAM) => Some(Accept::Ssz), + (APPLICATION, JSON) => Some(Accept::Json), + (STAR, STAR) => Some(Accept::Any), + _ => None, + } + .map(|item_accept_type| { + let q_val = params + .iter() + .find_map(|(n, v)| match n.as_str() { + Q => { + Some((v.as_str().parse::().unwrap_or(0_f32) * 1000_f32) as u16) + } + _ => None, + }) + .or(Some(1000_u16)); + + (q_val.unwrap(), item_accept_type) + }); + + match q_accept { + Some((q, accept)) if q > highest_q => { + highest_q = q; + accept_type = Some(accept); + } + _ => (), + } + } + }); + accept_type.ok_or_else(|| "accept header is not supported".to_string()) + } +} + + +#[must_use] +#[derive(Debug, Clone, Copy, Default)] +pub struct JsonOrSsz(pub T); + +impl FromRequest for JsonOrSsz +where + T: serde::de::DeserializeOwned + ssz::Decode + 'static, + S: Send + Sync, +{ + type Rejection = Response; + + async fn from_request(req: Request, _state: &S) -> Result { + let headers = req.headers().clone(); + let content_type = headers + .get(CONTENT_TYPE) + .and_then(|value| value.to_str().ok()); + + let bytes = Bytes::from_request(req, _state) + .await + .map_err(IntoResponse::into_response)?; + + if let Some(content_type) = content_type { + if content_type.starts_with(&ContentType::Json.to_string()) { + let payload: T = serde_json::from_slice(&bytes) + .map_err(|_| StatusCode::BAD_REQUEST.into_response())?; + return Ok(Self(payload)); + } + + if content_type.starts_with(&ContentType::Ssz.to_string()) { + let payload = T::from_ssz_bytes(&bytes) + .map_err(|_| StatusCode::BAD_REQUEST.into_response())?; + return Ok(Self(payload)); + } + } + + 
Err(StatusCode::UNSUPPORTED_MEDIA_TYPE.into_response()) + } +} + + #[cfg(unix)] pub async fn wait_for_signal() -> eyre::Result<()> { use tokio::signal::unix::{signal, SignalKind}; diff --git a/crates/pbs/Cargo.toml b/crates/pbs/Cargo.toml index 5ef8bc64..1b259a59 100644 --- a/crates/pbs/Cargo.toml +++ b/crates/pbs/Cargo.toml @@ -11,6 +11,7 @@ cb-metrics.workspace = true # ethereum alloy.workspace = true +ethereum_ssz.workspace = true # networking axum.workspace = true diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index 919bad11..77b0395a 100644 --- a/crates/pbs/src/routes/get_header.rs +++ b/crates/pbs/src/routes/get_header.rs @@ -1,14 +1,15 @@ use alloy::primitives::utils::format_ether; use axum::{ extract::{Path, State}, - http::HeaderMap, + http::{HeaderMap, HeaderValue}, response::IntoResponse, }; use cb_common::{ pbs::{BuilderEvent, GetHeaderParams}, - utils::{get_user_agent, ms_into_slot}, + utils::{get_accept_header, get_user_agent, ms_into_slot, Accept, CONSENSUS_VERSION_HEADER}, }; -use reqwest::StatusCode; +use reqwest::{header::CONTENT_TYPE, StatusCode}; +use ssz::Encode; use tracing::{error, info}; use uuid::Uuid; @@ -33,17 +34,37 @@ pub async fn handle_get_header>( let ua = get_user_agent(&req_headers); let ms_into_slot = ms_into_slot(params.slot, state.config.chain); + let accept_header = get_accept_header(&req_headers); info!(ua, parent_hash=%params.parent_hash, validator_pubkey=%params.pubkey, ms_into_slot); match A::get_header(params, req_headers, state.clone()).await { Ok(res) => { state.publish_event(BuilderEvent::GetHeaderResponse(Box::new(res.clone()))); - if let Some(max_bid) = res { info!(value_eth = format_ether(max_bid.value()), block_hash =% max_bid.block_hash(), "received header"); - BEACON_NODE_STATUS.with_label_values(&["200", GET_HEADER_ENDPOINT_TAG]).inc(); - Ok((StatusCode::OK, axum::Json(max_bid)).into_response()) + let response = match accept_header { + Accept::Ssz => { + let mut res = { + info!("sending response as JSON"); + (StatusCode::OK, max_bid.data.as_ssz_bytes()).into_response() + }; + let Ok(consensus_version_header) = HeaderValue::from_str(&format!("{}", max_bid.version)) else { + info!("sending response as JSON"); + return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()) + }; + let Ok(content_type_header) = HeaderValue::from_str(&format!("{}", Accept::Ssz)) else { + info!("sending response as JSON"); + return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()) + }; + res.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + res.headers_mut().insert(CONTENT_TYPE, content_type_header); + info!("sending response as SSZ"); + res + }, + Accept::Json | Accept::Any => (StatusCode::OK, axum::Json(max_bid)).into_response(), + }; + Ok(response) } else { // spec: return 204 if request is valid but no bid available info!("no header available for slot"); diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index c9d206a1..8e7b7072 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -1,9 +1,10 @@ -use axum::{extract::State, http::HeaderMap, response::IntoResponse, Json}; +use axum::{extract::State, http::{HeaderMap, HeaderValue}, response::IntoResponse, Json}; use cb_common::{ pbs::{BuilderEvent, SignedBlindedBeaconBlock}, - utils::{get_user_agent, timestamp_of_slot_start_millis, utcnow_ms}, + utils::{get_content_type_header, get_user_agent, timestamp_of_slot_start_millis, utcnow_ms, JsonOrSsz, 
CONSENSUS_VERSION_HEADER}, }; -use reqwest::StatusCode; +use reqwest::{header::CONTENT_TYPE, StatusCode}; +use ssz::Encode; use tracing::{error, info, trace}; use uuid::Uuid; @@ -19,7 +20,7 @@ use crate::{ pub async fn handle_submit_block>( State(state): State>, req_headers: HeaderMap, - Json(signed_blinded_block): Json, + JsonOrSsz(signed_blinded_block): JsonOrSsz, ) -> Result { let state = state.read().clone(); @@ -31,6 +32,8 @@ pub async fn handle_submit_block>( let block_hash = signed_blinded_block.message.body.execution_payload_header.block_hash; let slot_start_ms = timestamp_of_slot_start_millis(slot, state.config.chain); let ua = get_user_agent(&req_headers); + let content_type_header = get_content_type_header(&req_headers); + info!(ua, ms_into_slot=now.saturating_sub(slot_start_ms), %block_hash); @@ -39,9 +42,31 @@ pub async fn handle_submit_block>( trace!(?res); state.publish_event(BuilderEvent::SubmitBlockResponse(Box::new(res.clone()))); info!("received unblinded block"); - BEACON_NODE_STATUS.with_label_values(&["200", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]).inc(); - Ok((StatusCode::OK, Json(res).into_response())) + + let response = match content_type_header { + cb_common::utils::ContentType::Json => { + info!("sending response as JSON"); + (StatusCode::OK, Json(res)).into_response() + }, + cb_common::utils::ContentType::Ssz => { + let mut response = (StatusCode::OK, res.data.as_ssz_bytes()).into_response(); + let Ok(consensus_version_header) = HeaderValue::from_str(&format!("{}", res.version)) else { + info!("sending response as JSON"); + return Ok((StatusCode::OK, axum::Json(res)).into_response()) + }; + let Ok(content_type_header) = HeaderValue::from_str(&content_type_header.to_string()) else { + info!("sending response as JSON"); + return Ok((StatusCode::OK, axum::Json(res)).into_response()) + }; + response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + info!("sending response as SSZ"); + response + }, + }; + + Ok(response) } Err(err) => { From 50595934e84181cc13de3bfc6f6ba8b877de3c83 Mon Sep 17 00:00:00 2001 From: Eitan Seri-Levi Date: Tue, 18 Feb 2025 13:22:26 -0800 Subject: [PATCH 02/11] Integration tests, fixes, and clippy/linting --- Cargo.lock | 1 + crates/common/src/pbs/types/beacon_block.rs | 4 +- .../src/pbs/types/blinded_block_body.rs | 36 ++++----- crates/common/src/pbs/types/get_header.rs | 6 +- crates/common/src/pbs/types/kzg.rs | 6 +- crates/common/src/utils.rs | 37 +++++----- crates/pbs/src/routes/get_header.rs | 27 ++++--- crates/pbs/src/routes/submit_block.rs | 41 ++++++---- tests/Cargo.toml | 2 + tests/src/mock_relay.rs | 37 +++++++--- tests/src/mock_validator.rs | 51 +++++++++++-- tests/tests/pbs_integration.rs | 74 +++++++++++++++++-- 12 files changed, 229 insertions(+), 93 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0cebdf56..49a5728b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1410,6 +1410,7 @@ dependencies = [ "axum", "cb-common", "cb-pbs", + "ethereum_ssz 0.7.1", "eyre", "reqwest", "serde_json", diff --git a/crates/common/src/pbs/types/beacon_block.rs b/crates/common/src/pbs/types/beacon_block.rs index 289637bf..a1fd8d11 100644 --- a/crates/common/src/pbs/types/beacon_block.rs +++ b/crates/common/src/pbs/types/beacon_block.rs @@ -7,7 +7,7 @@ use super::{ execution_payload::ExecutionPayload, spec::DenebSpec, utils::VersionedResponse, }; -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, 
Deserialize, Encode, Decode)] /// Sent to relays in submit_block pub struct SignedBlindedBeaconBlock { pub message: BlindedBeaconBlock, @@ -20,7 +20,7 @@ impl SignedBlindedBeaconBlock { } } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct BlindedBeaconBlock { #[serde(with = "serde_utils::quoted_u64")] pub slot: u64, diff --git a/crates/common/src/pbs/types/blinded_block_body.rs b/crates/common/src/pbs/types/blinded_block_body.rs index 20785954..ebad3671 100644 --- a/crates/common/src/pbs/types/blinded_block_body.rs +++ b/crates/common/src/pbs/types/blinded_block_body.rs @@ -3,7 +3,7 @@ use alloy::{ rpc::types::beacon::{BlsPublicKey, BlsSignature}, }; use serde::{Deserialize, Serialize}; -use ssz_derive::Decode; +use ssz_derive::{Decode, Encode}; use ssz_types::{typenum, BitList, BitVector, FixedVector, VariableList}; use super::{ @@ -11,7 +11,7 @@ use super::{ }; use crate::utils::as_str; -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct BlindedBeaconBlockBody { pub randao_reveal: BlsSignature, pub eth1_data: Eth1Data, @@ -28,7 +28,7 @@ pub struct BlindedBeaconBlockBody { pub blob_kzg_commitments: KzgCommitments, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct Eth1Data { pub deposit_root: B256, #[serde(with = "serde_utils::quoted_u64")] @@ -36,7 +36,7 @@ pub struct Eth1Data { pub block_hash: B256, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct BeaconBlockHeader { #[serde(with = "serde_utils::quoted_u64")] pub slot: u64, @@ -47,13 +47,13 @@ pub struct BeaconBlockHeader { pub body_root: B256, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct SignedBeaconBlockHeader { pub message: BeaconBlockHeader, pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct BlsToExecutionChange { #[serde(with = "as_str")] pub validator_index: u64, @@ -61,25 +61,25 @@ pub struct BlsToExecutionChange { pub to_execution_address: Address, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct SignedBlsToExecutionChange { pub message: BlsToExecutionChange, pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct ProposerSlashing { pub signed_header_1: SignedBeaconBlockHeader, pub signed_header_2: SignedBeaconBlockHeader, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct AttesterSlashing { pub attestation_1: IndexedAttestation, pub attestation_2: IndexedAttestation, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] #[serde(bound = "T: EthSpec")] pub struct IndexedAttestation { /// Lists validator registry indices, not committee indices. 
@@ -89,7 +89,7 @@ pub struct IndexedAttestation { pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct AttestationData { #[serde(with = "serde_utils::quoted_u64")] pub slot: u64, @@ -102,14 +102,14 @@ pub struct AttestationData { pub target: Checkpoint, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct Checkpoint { #[serde(with = "serde_utils::quoted_u64")] pub epoch: u64, pub root: B256, } -#[derive(Debug, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode)] #[serde(bound = "T: EthSpec")] pub struct Attestation { pub aggregation_bits: BitList, @@ -117,13 +117,13 @@ pub struct Attestation { pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct Deposit { pub proof: FixedVector, // put this in EthSpec? pub data: DepositData, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct DepositData { pub pubkey: BlsPublicKey, pub withdrawal_credentials: B256, @@ -132,13 +132,13 @@ pub struct DepositData { pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct SignedVoluntaryExit { pub message: VoluntaryExit, pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct VoluntaryExit { /// Earliest epoch when voluntary exit can be processed. 
#[serde(with = "serde_utils::quoted_u64")] @@ -147,7 +147,7 @@ pub struct VoluntaryExit { pub validator_index: u64, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Decode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] #[serde(bound = "T: EthSpec")] pub struct SyncAggregate { pub sync_committee_bits: BitVector, diff --git a/crates/common/src/pbs/types/get_header.rs b/crates/common/src/pbs/types/get_header.rs index 55b893db..796c8fcb 100644 --- a/crates/common/src/pbs/types/get_header.rs +++ b/crates/common/src/pbs/types/get_header.rs @@ -3,7 +3,7 @@ use alloy::{ rpc::types::beacon::{BlsPublicKey, BlsSignature}, }; use serde::{Deserialize, Serialize}; -use ssz_derive::Encode; +use ssz_derive::{Decode, Encode}; use tree_hash_derive::TreeHash; use super::{ @@ -38,13 +38,13 @@ impl GetHeaderResponse { } } -#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] pub struct SignedExecutionPayloadHeader { pub message: ExecutionPayloadHeaderMessage, pub signature: BlsSignature, } -#[derive(Debug, Default, Clone, Serialize, Deserialize, TreeHash, Encode)] +#[derive(Debug, Default, Clone, Serialize, Deserialize, TreeHash, Encode, Decode)] pub struct ExecutionPayloadHeaderMessage { pub header: ExecutionPayloadHeader, pub blob_kzg_commitments: KzgCommitments, diff --git a/crates/common/src/pbs/types/kzg.rs b/crates/common/src/pbs/types/kzg.rs index 165302ea..61d618be 100644 --- a/crates/common/src/pbs/types/kzg.rs +++ b/crates/common/src/pbs/types/kzg.rs @@ -4,7 +4,7 @@ use std::{ }; use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use ssz::{Encode, Decode}; +use ssz::{Decode, Encode}; use ssz_types::VariableList; use tree_hash::{PackedEncoding, TreeHash}; @@ -28,7 +28,7 @@ impl Decode for KzgCommitment { } fn from_ssz_bytes(bytes: &[u8]) -> Result { - <[u8; BYTES_PER_COMMITMENT]>::from_ssz_bytes(bytes).and_then(|o| Ok(Self(o))) + <[u8; BYTES_PER_COMMITMENT]>::from_ssz_bytes(bytes).map(Self) } } @@ -38,7 +38,7 @@ impl Encode for KzgCommitment { } fn ssz_append(&self, buf: &mut Vec) { - self.0.ssz_append(buf) + self.0.ssz_append(buf) } fn ssz_bytes_len(&self) -> usize { diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 378e5aa3..0a4c88a0 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -4,20 +4,24 @@ use std::{ str::FromStr, time::{SystemTime, UNIX_EPOCH}, }; -use bytes::Bytes; -use axum::{ - extract::{FromRequest, Request}, - response::{IntoResponse, Response}, - http::HeaderValue -}; + use alloy::{ primitives::U256, rpc::types::beacon::{BlsPublicKey, BlsSignature}, }; +use axum::{ + extract::{FromRequest, Request}, + http::HeaderValue, + response::{IntoResponse, Response}, +}; use blst::min_pk::{PublicKey, Signature}; +use bytes::Bytes; use mediatype::{names, MediaType, MediaTypeList}; use rand::{distributions::Alphanumeric, Rng}; -use reqwest::{header::{HeaderMap, ACCEPT, CONTENT_TYPE}, StatusCode}; +use reqwest::{ + header::{HeaderMap, ACCEPT, CONTENT_TYPE}, + StatusCode, +}; use serde::{de::DeserializeOwned, Serialize}; use serde_json::Value; use tracing::Level; @@ -292,7 +296,10 @@ pub fn get_accept_header(req_headers: &HeaderMap) -> Accept { /// Parse CONTENT TYPE header, default to JSON if missing or mal-formatted pub fn get_content_type_header(req_headers: &HeaderMap) -> ContentType { ContentType::from_str( - req_headers.get(CONTENT_TYPE).and_then(|value| value.to_str().ok()).unwrap_or("application/json"), + 
req_headers + .get(CONTENT_TYPE) + .and_then(|value| value.to_str().ok()) + .unwrap_or("application/json"), ) .unwrap_or(ContentType::Json) } @@ -300,7 +307,7 @@ pub fn get_content_type_header(req_headers: &HeaderMap) -> ContentType { #[derive(Debug, Clone, Copy, PartialEq)] pub enum ContentType { Json, - Ssz + Ssz, } impl std::fmt::Display for ContentType { @@ -318,7 +325,7 @@ impl FromStr for ContentType { match value { "application/json" => Ok(ContentType::Json), "application/octet-stream" => Ok(ContentType::Ssz), - _ => Err(format!("unknown content type: {}", value)), + _ => Ok(ContentType::Json), } } } @@ -392,7 +399,6 @@ impl FromStr for Accept { } } - #[must_use] #[derive(Debug, Clone, Copy, Default)] pub struct JsonOrSsz(pub T); @@ -406,13 +412,9 @@ where async fn from_request(req: Request, _state: &S) -> Result { let headers = req.headers().clone(); - let content_type = headers - .get(CONTENT_TYPE) - .and_then(|value| value.to_str().ok()); + let content_type = headers.get(CONTENT_TYPE).and_then(|value| value.to_str().ok()); - let bytes = Bytes::from_request(req, _state) - .await - .map_err(IntoResponse::into_response)?; + let bytes = Bytes::from_request(req, _state).await.map_err(IntoResponse::into_response)?; if let Some(content_type) = content_type { if content_type.starts_with(&ContentType::Json.to_string()) { @@ -432,7 +434,6 @@ where } } - #[cfg(unix)] pub async fn wait_for_signal() -> eyre::Result<()> { use tokio::signal::unix::{signal, SignalKind}; diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index 77b0395a..2e859aef 100644 --- a/crates/pbs/src/routes/get_header.rs +++ b/crates/pbs/src/routes/get_header.rs @@ -45,24 +45,29 @@ pub async fn handle_get_header>( BEACON_NODE_STATUS.with_label_values(&["200", GET_HEADER_ENDPOINT_TAG]).inc(); let response = match accept_header { Accept::Ssz => { - let mut res = { + let mut res = + { (StatusCode::OK, max_bid.data.as_ssz_bytes()).into_response() }; + let Ok(consensus_version_header) = + HeaderValue::from_str(&format!("{}", max_bid.version)) + else { info!("sending response as JSON"); - (StatusCode::OK, max_bid.data.as_ssz_bytes()).into_response() + return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()); }; - let Ok(consensus_version_header) = HeaderValue::from_str(&format!("{}", max_bid.version)) else { + let Ok(content_type_header) = + HeaderValue::from_str(&format!("{}", Accept::Ssz)) + else { info!("sending response as JSON"); - return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()) + return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()); }; - let Ok(content_type_header) = HeaderValue::from_str(&format!("{}", Accept::Ssz)) else { - info!("sending response as JSON"); - return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()) - }; - res.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + res.headers_mut() + .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); res.headers_mut().insert(CONTENT_TYPE, content_type_header); info!("sending response as SSZ"); res - }, - Accept::Json | Accept::Any => (StatusCode::OK, axum::Json(max_bid)).into_response(), + } + Accept::Json | Accept::Any => { + (StatusCode::OK, axum::Json(max_bid)).into_response() + } }; Ok(response) } else { diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index 8e7b7072..c40d098c 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -1,7 +1,15 @@ -use axum::{extract::State, 
http::{HeaderMap, HeaderValue}, response::IntoResponse, Json}; +use axum::{ + extract::State, + http::{HeaderMap, HeaderValue}, + response::IntoResponse, + Json, +}; use cb_common::{ pbs::{BuilderEvent, SignedBlindedBeaconBlock}, - utils::{get_content_type_header, get_user_agent, timestamp_of_slot_start_millis, utcnow_ms, JsonOrSsz, CONSENSUS_VERSION_HEADER}, + utils::{ + get_accept_header, get_user_agent, timestamp_of_slot_start_millis, utcnow_ms, ContentType, + JsonOrSsz, CONSENSUS_VERSION_HEADER, + }, }; use reqwest::{header::CONTENT_TYPE, StatusCode}; use ssz::Encode; @@ -32,8 +40,7 @@ pub async fn handle_submit_block>( let block_hash = signed_blinded_block.message.body.execution_payload_header.block_hash; let slot_start_ms = timestamp_of_slot_start_millis(slot, state.config.chain); let ua = get_user_agent(&req_headers); - let content_type_header = get_content_type_header(&req_headers); - + let accept_header = get_accept_header(&req_headers); info!(ua, ms_into_slot=now.saturating_sub(slot_start_ms), %block_hash); @@ -44,26 +51,32 @@ pub async fn handle_submit_block>( info!("received unblinded block"); BEACON_NODE_STATUS.with_label_values(&["200", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]).inc(); - let response = match content_type_header { - cb_common::utils::ContentType::Json => { + let response = match accept_header { + cb_common::utils::Accept::Json | cb_common::utils::Accept::Any => { info!("sending response as JSON"); (StatusCode::OK, Json(res)).into_response() - }, - cb_common::utils::ContentType::Ssz => { + } + cb_common::utils::Accept::Ssz => { let mut response = (StatusCode::OK, res.data.as_ssz_bytes()).into_response(); - let Ok(consensus_version_header) = HeaderValue::from_str(&format!("{}", res.version)) else { + let Ok(consensus_version_header) = + HeaderValue::from_str(&format!("{}", res.version)) + else { info!("sending response as JSON"); - return Ok((StatusCode::OK, axum::Json(res)).into_response()) + return Ok((StatusCode::OK, axum::Json(res)).into_response()); }; - let Ok(content_type_header) = HeaderValue::from_str(&content_type_header.to_string()) else { + let Ok(content_type_header) = + HeaderValue::from_str(&ContentType::Ssz.to_string()) + else { info!("sending response as JSON"); - return Ok((StatusCode::OK, axum::Json(res)).into_response()) + return Ok((StatusCode::OK, axum::Json(res)).into_response()); }; - response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + response + .headers_mut() + .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); response.headers_mut().insert(CONTENT_TYPE, content_type_header); info!("sending response as SSZ"); response - }, + } }; Ok(response) diff --git a/tests/Cargo.toml b/tests/Cargo.toml index 04b66330..17491294 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -23,3 +23,5 @@ tracing-subscriber.workspace = true tree_hash.workspace = true eyre.workspace = true + +ethereum_ssz.workspace = true diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index 672ca806..f338e220 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -9,22 +9,28 @@ use std::{ use alloy::{primitives::U256, rpc::types::beacon::relay::ValidatorRegistration}; use axum::{ extract::{Path, State}, - http::StatusCode, + http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Response}, routing::{get, post}, Json, Router, }; use cb_common::{ pbs::{ - GetHeaderParams, GetHeaderResponse, SubmitBlindedBlockResponse, BUILDER_API_PATH, - GET_HEADER_PATH, GET_STATUS_PATH, REGISTER_VALIDATOR_PATH, 
SUBMIT_BLOCK_PATH, + GetHeaderParams, GetHeaderResponse, PayloadAndBlobs, SubmitBlindedBlockResponse, Version, + BUILDER_API_PATH, GET_HEADER_PATH, GET_STATUS_PATH, REGISTER_VALIDATOR_PATH, + SUBMIT_BLOCK_PATH, }, signature::sign_builder_root, signer::BlsSecretKey, types::Chain, - utils::{blst_pubkey_to_alloy, timestamp_of_slot_start_sec}, + utils::{ + blst_pubkey_to_alloy, get_content_type_header, timestamp_of_slot_start_sec, + CONSENSUS_VERSION_HEADER, + }, }; use cb_pbs::MAX_SIZE_SUBMIT_BLOCK; +use reqwest::header::CONTENT_TYPE; +use ssz::Encode; use tokio::net::TcpListener; use tracing::debug; use tree_hash::TreeHash; @@ -125,14 +131,27 @@ async fn handle_register_validator( StatusCode::OK } -async fn handle_submit_block(State(state): State>) -> impl IntoResponse { +async fn handle_submit_block( + State(state): State>, + headers: HeaderMap, +) -> impl IntoResponse { state.received_submit_block.fetch_add(1, Ordering::Relaxed); - - let response = if state.large_body { + let accept_header = get_content_type_header(&headers); + let data = if state.large_body { vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK] } else { - serde_json::to_vec(&SubmitBlindedBlockResponse::default()).unwrap() + match accept_header { + cb_common::utils::ContentType::Json => { + serde_json::to_vec(&SubmitBlindedBlockResponse::default()).unwrap() + } + cb_common::utils::ContentType::Ssz => PayloadAndBlobs::default().as_ssz_bytes(), + } }; - (StatusCode::OK, Json(response)).into_response() + let mut response = (StatusCode::OK, data).into_response(); + let consensus_version_header = HeaderValue::from_str(&Version::Deneb.to_string()).unwrap(); + let content_type_header = HeaderValue::from_str(&accept_header.to_string()).unwrap(); + response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + response } diff --git a/tests/src/mock_validator.rs b/tests/src/mock_validator.rs index 9b7ff2c6..5ef01eb8 100644 --- a/tests/src/mock_validator.rs +++ b/tests/src/mock_validator.rs @@ -2,8 +2,18 @@ use alloy::{ primitives::B256, rpc::types::beacon::{relay::ValidatorRegistration, BlsPublicKey}, }; -use cb_common::pbs::{GetHeaderResponse, RelayClient, SignedBlindedBeaconBlock}; -use reqwest::Error; +use cb_common::{ + pbs::{ + GetHeaderResponse, RelayClient, SignedBlindedBeaconBlock, SignedExecutionPayloadHeader, + Version, + }, + utils::{get_content_type_header, Accept, ContentType, CONSENSUS_VERSION_HEADER}, +}; +use reqwest::{ + header::{ACCEPT, CONTENT_TYPE}, + Error, +}; +use ssz::{Decode, Encode}; use crate::utils::generate_mock_relay; @@ -16,13 +26,28 @@ impl MockValidator { Ok(Self { comm_boost: generate_mock_relay(port, BlsPublicKey::default())? 
}) } - pub async fn do_get_header(&self, pubkey: Option) -> Result<(), Error> { + pub async fn do_get_header( + &self, + pubkey: Option, + accept: Accept, + ) -> Result<(), Error> { let url = self .comm_boost .get_header_url(0, B256::ZERO, pubkey.unwrap_or(BlsPublicKey::ZERO)) .unwrap(); - let res = self.comm_boost.client.get(url).send().await?.bytes().await?; - assert!(serde_json::from_slice::(&res).is_ok()); + let res = + self.comm_boost.client.get(url).header(ACCEPT, &accept.to_string()).send().await?; + let content_type = get_content_type_header(res.headers()); + let res_bytes = res.bytes().await?; + + match content_type { + ContentType::Json => { + assert!(serde_json::from_slice::(&res_bytes).is_ok()) + } + ContentType::Ssz => { + assert!(SignedExecutionPayloadHeader::from_ssz_bytes(&res_bytes).is_ok()) + } + } Ok(()) } @@ -50,15 +75,27 @@ impl MockValidator { Ok(()) } - pub async fn do_submit_block(&self) -> Result<(), Error> { + pub async fn do_submit_block( + &self, + accept: Accept, + content_type: ContentType, + ) -> Result<(), Error> { let url = self.comm_boost.submit_block_url().unwrap(); let signed_blinded_block = SignedBlindedBeaconBlock::default(); + let body = match content_type { + ContentType::Json => serde_json::to_vec(&signed_blinded_block).unwrap(), + ContentType::Ssz => signed_blinded_block.as_ssz_bytes(), + }; + self.comm_boost .client .post(url) - .json(&signed_blinded_block) + .body(body) + .header(CONSENSUS_VERSION_HEADER, Version::Deneb.to_string()) + .header(CONTENT_TYPE, &content_type.to_string()) + .header(ACCEPT, &accept.to_string()) .send() .await? .error_for_status()?; diff --git a/tests/tests/pbs_integration.rs b/tests/tests/pbs_integration.rs index 2e7f95e8..a230939b 100644 --- a/tests/tests/pbs_integration.rs +++ b/tests/tests/pbs_integration.rs @@ -12,7 +12,7 @@ use cb_common::{ pbs::RelayClient, signer::{random_secret, BlsPublicKey}, types::Chain, - utils::blst_pubkey_to_alloy, + utils::{blst_pubkey_to_alloy, Accept, ContentType}, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ @@ -76,7 +76,36 @@ async fn test_get_header() -> Result<()> { let mock_validator = MockValidator::new(port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(None).await; + let res = mock_validator.do_get_header(None, Accept::Json).await; + + assert!(res.is_ok()); + assert_eq!(mock_state.received_get_header(), 1); + Ok(()) +} + +#[tokio::test] +async fn test_get_header_ssz() -> Result<()> { + setup_test_env(); + let signer = random_secret(); + let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); + + let chain = Chain::Holesky; + let port = 3000; + + let mock_state = Arc::new(MockRelayState::new(chain, signer)); + let mock_relay = generate_mock_relay(port + 1, *pubkey)?; + tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); + + let config = to_pbs_config(chain, get_pbs_static_config(port), vec![mock_relay]); + let state = PbsState::new(config); + tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); + + // leave some time to start servers + tokio::time::sleep(Duration::from_millis(100)).await; + + let mock_validator = MockValidator::new(port)?; + info!("Sending get header"); + let res = mock_validator.do_get_header(None, Accept::Ssz).await; assert!(res.is_ok()); assert_eq!(mock_state.received_get_header(), 1); @@ -207,9 +236,38 @@ async fn test_submit_block() -> Result<()> { let mock_validator = MockValidator::new(port)?; info!("Sending submit block"); - let res = 
mock_validator.do_submit_block().await; + let res = mock_validator.do_submit_block(Accept::Json, ContentType::Json).await; - assert!(res.is_err()); + assert!(!res.is_err()); + assert_eq!(mock_state.received_submit_block(), 1); + Ok(()) +} + +#[tokio::test] +async fn test_submit_block_ssz() -> Result<()> { + setup_test_env(); + let signer = random_secret(); + let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); + + let chain = Chain::Holesky; + let port = 3400; + + let relays = vec![generate_mock_relay(port + 1, *pubkey)?]; + let mock_state = Arc::new(MockRelayState::new(chain, signer)); + tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); + + let config = to_pbs_config(chain, get_pbs_static_config(port), relays); + let state = PbsState::new(config); + tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); + + // leave some time to start servers + tokio::time::sleep(Duration::from_millis(100)).await; + + let mock_validator = MockValidator::new(port)?; + info!("Sending submit block"); + let res = mock_validator.do_submit_block(Accept::Ssz, ContentType::Ssz).await; + + assert!(!res.is_err()); assert_eq!(mock_state.received_submit_block(), 1); Ok(()) } @@ -236,7 +294,7 @@ async fn test_submit_block_too_large() -> Result<()> { let mock_validator = MockValidator::new(port)?; info!("Sending submit block"); - let res = mock_validator.do_submit_block().await; + let res = mock_validator.do_submit_block(Accept::Json, ContentType::Json).await; assert!(res.is_err()); assert_eq!(mock_state.received_submit_block(), 1); @@ -283,13 +341,13 @@ async fn test_mux() -> Result<()> { let mock_validator = MockValidator::new(port)?; info!("Sending get header with default"); - let res = mock_validator.do_get_header(None).await; + let res = mock_validator.do_get_header(None, Accept::Json).await; assert!(res.is_ok()); assert_eq!(mock_state.received_get_header(), 1); // only default relay was used info!("Sending get header with mux"); - let res = mock_validator.do_get_header(Some(validator_pubkey)).await; + let res = mock_validator.do_get_header(Some(validator_pubkey), Accept::Json).await; assert!(res.is_ok()); assert_eq!(mock_state.received_get_header(), 3); // two mux relays were used @@ -304,7 +362,7 @@ async fn test_mux() -> Result<()> { assert!(res.is_ok()); assert_eq!(mock_state.received_register_validator(), 3); // default + 2 mux relays were used - let res = mock_validator.do_submit_block().await; + let res = mock_validator.do_submit_block(Accept::Json, ContentType::Json).await; assert!(res.is_err()); assert_eq!(mock_state.received_submit_block(), 3); // default + 2 mux relays were used From 7005422f24ca90107b7b515830588c13747c752e Mon Sep 17 00:00:00 2001 From: Eitan Seri-Levi Date: Sun, 23 Feb 2025 11:13:33 -0800 Subject: [PATCH 03/11] fix tests --- tests/src/mock_relay.rs | 86 +++-- tests/tests/pbs_get_header.rs | 49 ++- tests/tests/pbs_integration.rs | 474 ------------------------- tests/tests/pbs_mux.rs | 10 +- tests/tests/pbs_post_blinded_blocks.rs | 44 ++- 5 files changed, 164 insertions(+), 499 deletions(-) delete mode 100644 tests/tests/pbs_integration.rs diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index abbf8700..8b14f93c 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -16,17 +16,17 @@ use axum::{ }; use cb_common::{ pbs::{ - ExecutionPayloadHeaderMessageDeneb, GetHeaderParams, GetHeaderResponse, + ExecutionPayloadHeaderMessageDeneb, ExecutionPayloadHeaderMessageElectra, GetHeaderParams, PayloadAndBlobsDeneb, 
PayloadAndBlobsElectra, SignedExecutionPayloadHeader, - SubmitBlindedBlockResponse, BUILDER_API_PATH, GET_HEADER_PATH, GET_STATUS_PATH, - REGISTER_VALIDATOR_PATH, SUBMIT_BLOCK_PATH, + SubmitBlindedBlockResponse, VersionedResponse, BUILDER_API_PATH, GET_HEADER_PATH, + GET_STATUS_PATH, REGISTER_VALIDATOR_PATH, SUBMIT_BLOCK_PATH, }, signature::sign_builder_root, signer::BlsSecretKey, types::Chain, utils::{ blst_pubkey_to_alloy, get_accept_header, get_consensus_version_header, - timestamp_of_slot_start_sec, CONSENSUS_VERSION_HEADER, + timestamp_of_slot_start_sec, Accept, ForkName, CONSENSUS_VERSION_HEADER, }, }; use cb_pbs::MAX_SIZE_SUBMIT_BLOCK; @@ -106,23 +106,67 @@ pub fn mock_relay_app_router(state: Arc) -> Router { async fn handle_get_header( State(state): State>, Path(GetHeaderParams { parent_hash, .. }): Path, + headers: HeaderMap, ) -> Response { state.received_get_header.fetch_add(1, Ordering::Relaxed); + let accept_header = get_accept_header(&headers); + let consensus_version_header = + get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); + + let data = match consensus_version_header { + ForkName::Deneb => { + let mut response: SignedExecutionPayloadHeader = + SignedExecutionPayloadHeader::default(); + response.message.header.parent_hash = parent_hash; + response.message.header.block_hash.0[0] = 1; + response.message.value = U256::from(10); + response.message.pubkey = blst_pubkey_to_alloy(&state.signer.sk_to_pk()); + response.message.header.timestamp = timestamp_of_slot_start_sec(0, state.chain); + + let object_root = response.message.tree_hash_root().0; + response.signature = sign_builder_root(state.chain, &state.signer, object_root); + match accept_header { + Accept::Json | Accept::Any => { + let versioned_response: VersionedResponse< + SignedExecutionPayloadHeader, + SignedExecutionPayloadHeader, + > = VersionedResponse::Deneb(response); + serde_json::to_vec(&versioned_response).unwrap() + } + Accept::Ssz => response.as_ssz_bytes(), + } + } + ForkName::Electra => { + let mut response: SignedExecutionPayloadHeader = + SignedExecutionPayloadHeader::default(); + response.message.header.parent_hash = parent_hash; + response.message.header.block_hash.0[0] = 1; + response.message.value = U256::from(10); + response.message.pubkey = blst_pubkey_to_alloy(&state.signer.sk_to_pk()); + response.message.header.timestamp = timestamp_of_slot_start_sec(0, state.chain); + + let object_root = response.message.tree_hash_root().0; + response.signature = sign_builder_root(state.chain, &state.signer, object_root); + match accept_header { + Accept::Json | Accept::Any => { + let versioned_response: VersionedResponse< + SignedExecutionPayloadHeader, + SignedExecutionPayloadHeader, + > = VersionedResponse::Electra(response); + serde_json::to_vec(&versioned_response).unwrap() + } + Accept::Ssz => response.as_ssz_bytes(), + } + } + }; - let mut response: SignedExecutionPayloadHeader = - SignedExecutionPayloadHeader::default(); - - response.message.header.parent_hash = parent_hash; - response.message.header.block_hash.0[0] = 1; - response.message.value = U256::from(10); - response.message.pubkey = blst_pubkey_to_alloy(&state.signer.sk_to_pk()); - response.message.header.timestamp = timestamp_of_slot_start_sec(0, state.chain); - - let object_root = response.message.tree_hash_root().0; - response.signature = sign_builder_root(state.chain, &state.signer, object_root); - - let response = GetHeaderResponse::Deneb(response); - (StatusCode::OK, Json(response)).into_response() + let mut response = 
(StatusCode::OK, data).into_response(); + let consensus_version_header = + HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); + let content_type_header = HeaderValue::from_str(&accept_header.to_string()).unwrap(); + response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + response } async fn handle_get_status(State(state): State>) -> impl IntoResponse { @@ -145,8 +189,10 @@ async fn handle_submit_block( ) -> Response { state.received_submit_block.fetch_add(1, Ordering::Relaxed); let accept_header = get_accept_header(&headers); - let consensus_version_header = get_consensus_version_header(&headers).unwrap(); - let data = if state.large_body { + let consensus_version_header = + get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); + + let data = if state.large_body() { vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK] } else { match accept_header { diff --git a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index 519b2985..bc46cdf5 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -2,11 +2,11 @@ use std::{sync::Arc, time::Duration}; use alloy::primitives::{B256, U256}; use cb_common::{ - pbs::GetHeaderResponse, + pbs::{ExecutionPayloadHeaderMessageElectra, GetHeaderResponse, SignedExecutionPayloadHeader}, signature::sign_builder_root, signer::{random_secret, BlsPublicKey}, types::Chain, - utils::{blst_pubkey_to_alloy, timestamp_of_slot_start_sec, ForkName}, + utils::{blst_pubkey_to_alloy, timestamp_of_slot_start_sec, Accept, ForkName}, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ @@ -16,6 +16,7 @@ use cb_tests::{ }; use eyre::Result; use reqwest::StatusCode; +use ssz::Decode; use tracing::info; use tree_hash::TreeHash; @@ -69,6 +70,50 @@ async fn test_get_header() -> Result<()> { Ok(()) } +#[tokio::test] +async fn test_get_header_ssz() -> Result<()> { + setup_test_env(); + let signer = random_secret(); + let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); + + let chain = Chain::Holesky; + let pbs_port = 3200; + let relay_port = pbs_port + 1; + + // Run a mock relay + let mock_state = Arc::new(MockRelayState::new(chain, signer)); + let mock_relay = generate_mock_relay(relay_port, *pubkey)?; + tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); + + // Run the PBS service + let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![mock_relay.clone()]); + let state = PbsState::new(config); + tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); + + // leave some time to start servers + tokio::time::sleep(Duration::from_millis(100)).await; + + let mock_validator = MockValidator::new(pbs_port)?; + info!("Sending get header"); + let res = mock_validator.do_get_header(None, Some(Accept::Ssz), ForkName::Electra).await?; + assert_eq!(res.status(), StatusCode::OK); + + let res: SignedExecutionPayloadHeader = + SignedExecutionPayloadHeader::from_ssz_bytes(&res.bytes().await?).unwrap(); + + assert_eq!(mock_state.received_get_header(), 1); + assert_eq!(res.message.header.block_hash.0[0], 1); + assert_eq!(res.message.header.parent_hash, B256::ZERO); + assert_eq!(res.message.value, U256::from(10)); + assert_eq!(res.message.pubkey, blst_pubkey_to_alloy(&mock_state.signer.sk_to_pk())); + assert_eq!(res.message.header.timestamp, timestamp_of_slot_start_sec(0, chain)); + assert_eq!( + res.signature, + sign_builder_root(chain, &mock_state.signer, 
res.message.tree_hash_root().0) + ); + Ok(()) +} + #[tokio::test] async fn test_get_header_returns_204_if_relay_down() -> Result<()> { setup_test_env(); diff --git a/tests/tests/pbs_integration.rs b/tests/tests/pbs_integration.rs deleted file mode 100644 index 3556037a..00000000 --- a/tests/tests/pbs_integration.rs +++ /dev/null @@ -1,474 +0,0 @@ -use std::{ - collections::HashMap, - net::{Ipv4Addr, SocketAddr}, - sync::Arc, - time::Duration, - u64, -}; - -use alloy::{primitives::U256, rpc::types::beacon::relay::ValidatorRegistration}; -use cb_common::{ - config::{PbsConfig, PbsModuleConfig, RuntimeMuxConfig}, - pbs::{ - ExecutionPayloadHeaderMessageDeneb, ExecutionPayloadHeaderMessageElectra, RelayClient, - SignedExecutionPayloadHeader, VersionedResponse, - }, - signer::{random_secret, BlsPublicKey}, - types::Chain, - utils::{ - blst_pubkey_to_alloy, get_consensus_version_header, get_content_type_header, Accept, - ContentType, ForkName, - }, -}; -use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; -use cb_tests::{ - mock_relay::{start_mock_relay_service, MockRelayState}, - mock_validator::MockValidator, - utils::{generate_mock_relay, generate_mock_relay_with_batch_size, setup_test_env}, -}; -use eyre::Result; -use ssz::Decode; -use tracing::info; - -fn get_pbs_static_config(port: u16) -> PbsConfig { - PbsConfig { - host: Ipv4Addr::UNSPECIFIED, - port, - wait_all_registrations: true, - relay_check: true, - timeout_get_header_ms: u64::MAX, - timeout_get_payload_ms: u64::MAX, - timeout_register_validator_ms: u64::MAX, - skip_sigverify: false, - min_bid_wei: U256::ZERO, - late_in_slot_time_ms: u64::MAX, - extra_validation_enabled: false, - rpc_url: None, - } -} - -fn to_pbs_config(chain: Chain, pbs_config: PbsConfig, relays: Vec) -> PbsModuleConfig { - PbsModuleConfig { - chain, - endpoint: SocketAddr::new(pbs_config.host.into(), pbs_config.port), - pbs_config: Arc::new(pbs_config), - signer_client: None, - event_publisher: None, - all_relays: relays.clone(), - relays, - muxes: None, - } -} - -#[tokio::test] -async fn test_get_header() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3000; - - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - let mock_relay = generate_mock_relay(port + 1, *pubkey)?; - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), vec![mock_relay]); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending get header"); - let res = mock_validator.do_get_header(None, Some(Accept::Json), ForkName::Electra).await; - - assert!(res.is_ok()); - let response = res.unwrap(); - let content_type = get_content_type_header(&response.headers()); - let payload = response.bytes().await.unwrap(); - let _ = match content_type { - ContentType::Json => serde_json::from_slice::< - VersionedResponse< - SignedExecutionPayloadHeader, - SignedExecutionPayloadHeader, - >, - >(&payload), - ContentType::Ssz => panic!("Should be JSON"), - }; - assert_eq!(mock_state.received_get_header(), 1); - Ok(()) -} - -#[tokio::test] -async fn test_get_header_ssz() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = 
blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3000; - - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - let mock_relay = generate_mock_relay(port + 1, *pubkey)?; - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), vec![mock_relay]); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending get header"); - let res = mock_validator.do_get_header(None, Some(Accept::Ssz), ForkName::Electra).await; - assert!(res.is_ok()); - let response = res.unwrap(); - let content_type = get_content_type_header(&response.headers()); - let consensus_version = get_consensus_version_header(&response.headers()).unwrap(); - let payload = response.bytes().await.unwrap(); - match content_type { - ContentType::Json => panic!("Should be SSZ"), - ContentType::Ssz => { - match consensus_version { - ForkName::Deneb => { - SignedExecutionPayloadHeader::::from_ssz_bytes(&payload).unwrap(); - } - ForkName::Electra => { - SignedExecutionPayloadHeader::::from_ssz_bytes(&payload).unwrap(); - } - }; - } - }; - assert_eq!(mock_state.received_get_header(), 1); - Ok(()) -} - -#[tokio::test] -async fn test_get_status() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3100; - - let relays = - vec![generate_mock_relay(port + 1, *pubkey)?, generate_mock_relay(port + 2, *pubkey)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 2)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), relays); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending get status"); - let res = mock_validator.do_get_status().await; - - assert!(res.is_ok()); - assert_eq!(mock_state.received_get_status(), 2); - Ok(()) -} - -#[tokio::test] -async fn test_register_validators() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3300; - - let relays = vec![generate_mock_relay(port + 1, *pubkey)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), relays); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending register validator"); - let res = mock_validator.do_register_validator().await; - - assert!(res.is_ok()); - assert_eq!(mock_state.received_register_validator(), 1); - Ok(()) -} - -#[tokio::test] -async fn test_batch_register_validators() -> Result<()> { - setup_test_env(); - let signer = 
random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3310; - - let relays = vec![generate_mock_relay_with_batch_size(port + 1, *pubkey, 5)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), relays); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let data = include_str!("../data/registration_holesky.json"); - let registrations: Vec = serde_json::from_str(data)?; - - let mock_validator = MockValidator::new(port)?; - info!("Sending register validator"); - let res = mock_validator.do_register_custom_validators(registrations.clone()).await; - - // registrations.len() == 17. 5 per batch, 4 batches - assert!(res.is_ok()); - assert_eq!(mock_state.received_register_validator(), 4); - - let mock_validator = MockValidator::new(port)?; - info!("Sending register validator"); - let res = mock_validator.do_register_custom_validators(registrations[..2].to_vec()).await; - - // Expected one more registration request - assert!(res.is_ok()); - assert_eq!(mock_state.received_register_validator(), 5); - - Ok(()) -} - -#[tokio::test] -async fn test_submit_block_deneb() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3400; - - let relays = vec![generate_mock_relay(port + 1, *pubkey)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), relays); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending submit block"); - let res = mock_validator - .do_submit_block(None, Accept::Json, ContentType::Json, ForkName::Deneb) - .await; - - assert!(!res.is_err()); - assert_eq!(mock_state.received_submit_block(), 1); - Ok(()) -} - -#[tokio::test] -async fn test_submit_block_electra() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3400; - - let relays = vec![generate_mock_relay(port + 1, *pubkey)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), relays); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending submit block"); - let res = mock_validator - .do_submit_block(None, Accept::Json, ContentType::Json, ForkName::Electra) - .await; - - assert!(!res.is_err()); - assert_eq!(mock_state.received_submit_block(), 1); - Ok(()) -} - -#[tokio::test] -async fn test_submit_block_ssz_deneb() -> Result<()> { - setup_test_env(); - let signer = 
random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3400; - - let relays = vec![generate_mock_relay(port + 1, *pubkey)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), relays); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending submit block"); - let res = - mock_validator.do_submit_block(None, Accept::Ssz, ContentType::Ssz, ForkName::Deneb).await; - - assert!(!res.is_err()); - assert_eq!(mock_state.received_submit_block(), 1); - Ok(()) -} - -#[tokio::test] -async fn test_submit_block_ssz_electra() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3400; - - let relays = vec![generate_mock_relay(port + 1, *pubkey)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), relays); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending submit block"); - let res = mock_validator - .do_submit_block(None, Accept::Ssz, ContentType::Ssz, ForkName::Electra) - .await; - - assert!(!res.is_err()); - assert_eq!(mock_state.received_submit_block(), 1); - Ok(()) -} - -#[tokio::test] -async fn test_submit_block_too_large() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3500; - - let relays = vec![generate_mock_relay(port + 1, *pubkey)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer).with_large_body()); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - - let config = to_pbs_config(chain, get_pbs_static_config(port), relays); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending submit block"); - let res = mock_validator - .do_submit_block(None, Accept::Json, ContentType::Json, ForkName::Electra) - .await; - - assert!(res.is_err()); - assert_eq!(mock_state.received_submit_block(), 1); - Ok(()) -} - -#[tokio::test] -async fn test_mux() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let port = 3600; - - let mux_relay_1 = generate_mock_relay(port + 1, *pubkey)?; - let mux_relay_2 = generate_mock_relay(port + 2, *pubkey)?; - let default_relay = generate_mock_relay(port + 3, *pubkey)?; - - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 1)); - 
tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 2)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), port + 3)); - - let relays = vec![default_relay.clone()]; - let mut config = to_pbs_config(chain, get_pbs_static_config(port), relays); - config.all_relays = vec![mux_relay_1.clone(), mux_relay_2.clone(), default_relay.clone()]; - - let mux = RuntimeMuxConfig { - id: String::from("test"), - config: config.pbs_config.clone(), - relays: vec![mux_relay_1, mux_relay_2], - }; - - let validator_pubkey = blst_pubkey_to_alloy(&random_secret().sk_to_pk()); - - config.muxes = Some(HashMap::from([(validator_pubkey, mux)])); - - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(port)?; - info!("Sending get header with default"); - let res = mock_validator.do_get_header(None, Some(Accept::Json), ForkName::Electra).await; - - assert!(res.is_ok()); - assert_eq!(mock_state.received_get_header(), 1); // only default relay was used - - info!("Sending get header with mux"); - let res = mock_validator.do_get_header(Some(validator_pubkey), Some(Accept::Json), ForkName::Electra).await; - - assert!(res.is_ok()); - assert_eq!(mock_state.received_get_header(), 3); // two mux relays were used - - let res = mock_validator.do_get_status().await; - - assert!(res.is_ok()); - assert_eq!(mock_state.received_get_status(), 3); // default + 2 mux relays were used - - let res = mock_validator.do_register_validator().await; - - assert!(res.is_ok()); - assert_eq!(mock_state.received_register_validator(), 3); // default + 2 mux relays were used - - let res = mock_validator - .do_submit_block(None, Accept::Json, ContentType::Json, ForkName::Electra) - .await; - - assert!(res.is_err()); - assert_eq!(mock_state.received_submit_block(), 3); // default + 2 mux relays were used - - Ok(()) -} diff --git a/tests/tests/pbs_mux.rs b/tests/tests/pbs_mux.rs index 8dd6e75d..ee0bc7f0 100644 --- a/tests/tests/pbs_mux.rs +++ b/tests/tests/pbs_mux.rs @@ -61,13 +61,19 @@ async fn test_mux() -> Result<()> { // Send default request without specifying a validator key let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header with default"); - assert_eq!(mock_validator.do_get_header(None, None, ForkName::Electra).await?.status(), StatusCode::OK); + assert_eq!( + mock_validator.do_get_header(None, None, ForkName::Electra).await?.status(), + StatusCode::OK + ); assert_eq!(mock_state.received_get_header(), 1); // only default relay was used // Send request specifying a validator key to use mux info!("Sending get header with mux"); assert_eq!( - mock_validator.do_get_header(Some(validator_pubkey), None, ForkName::Electra).await?.status(), + mock_validator + .do_get_header(Some(validator_pubkey), None, ForkName::Electra) + .await? 
+ .status(), StatusCode::OK ); assert_eq!(mock_state.received_get_header(), 3); // two mux relays were used diff --git a/tests/tests/pbs_post_blinded_blocks.rs b/tests/tests/pbs_post_blinded_blocks.rs index c7a332c6..a468ca1e 100644 --- a/tests/tests/pbs_post_blinded_blocks.rs +++ b/tests/tests/pbs_post_blinded_blocks.rs @@ -1,7 +1,7 @@ use std::{sync::Arc, time::Duration}; use cb_common::{ - pbs::{SignedBlindedBeaconBlock, SubmitBlindedBlockResponse}, + pbs::{PayloadAndBlobsElectra, SignedBlindedBeaconBlock, SubmitBlindedBlockResponse}, signer::{random_secret, BlsPublicKey}, types::Chain, utils::{blst_pubkey_to_alloy, Accept, ContentType, ForkName}, @@ -14,6 +14,7 @@ use cb_tests::{ }; use eyre::Result; use reqwest::StatusCode; +use ssz::Decode; use tracing::info; #[tokio::test] @@ -57,6 +58,47 @@ async fn test_submit_block() -> Result<()> { Ok(()) } +#[tokio::test] +async fn test_submit_block_ssz() -> Result<()> { + setup_test_env(); + let signer = random_secret(); + let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); + + let chain = Chain::Holesky; + let pbs_port = 3800; + + // Run a mock relay + let relays = vec![generate_mock_relay(pbs_port + 1, *pubkey)?]; + let mock_state = Arc::new(MockRelayState::new(chain, signer)); + tokio::spawn(start_mock_relay_service(mock_state.clone(), pbs_port + 1)); + + // Run the PBS service + let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), relays); + let state = PbsState::new(config); + tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); + + // leave some time to start servers + tokio::time::sleep(Duration::from_millis(100)).await; + + let mock_validator = MockValidator::new(pbs_port)?; + info!("Sending submit block"); + let res = mock_validator + .do_submit_block( + Some(SignedBlindedBeaconBlock::default()), + Accept::Ssz, + ContentType::Ssz, + ForkName::Electra, + ) + .await?; + + assert_eq!(res.status(), StatusCode::OK); + assert_eq!(mock_state.received_submit_block(), 1); + + let response_body = PayloadAndBlobsElectra::from_ssz_bytes(&res.bytes().await?).unwrap(); + assert_eq!(response_body.block_hash(), SubmitBlindedBlockResponse::default().block_hash()); + Ok(()) +} + #[tokio::test] async fn test_submit_block_too_large() -> Result<()> { setup_test_env(); From 06c207f4f820d7e31e29fa0d2eb6401ea91c3309 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 9 Sep 2025 04:22:31 -0400 Subject: [PATCH 04/11] Started porting #252 back to main --- Cargo.lock | 9 ++ Cargo.toml | 1 + crates/common/Cargo.toml | 1 + crates/common/src/signer/store.rs | 2 +- crates/common/src/utils.rs | 199 +++++++++++++++++++++++++- crates/pbs/Cargo.toml | 1 + crates/pbs/src/routes/get_header.rs | 40 +++++- crates/pbs/src/routes/submit_block.rs | 70 +++++++-- tests/Cargo.toml | 1 + tests/src/mock_relay.rs | 96 +++++++++---- tests/src/mock_validator.rs | 75 ++++++++-- tests/tests/payloads.rs | 11 +- tests/tests/pbs_get_header.rs | 5 +- 13 files changed, 448 insertions(+), 63 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2da754fc..28cdac64 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1531,6 +1531,7 @@ dependencies = [ "eyre", "futures", "jsonwebtoken", + "mediatype", "pbkdf2 0.12.2", "rand 0.9.2", "rayon", @@ -1577,6 +1578,7 @@ dependencies = [ "axum-extra", "cb-common", "cb-metrics", + "ethereum_ssz", "eyre", "futures", "lazy_static", @@ -1630,6 +1632,7 @@ dependencies = [ "cb-common", "cb-pbs", "cb-signer", + "ethereum_ssz", "eyre", "reqwest", "serde_json", @@ -3633,6 +3636,12 @@ version = "0.8.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" +[[package]] +name = "mediatype" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f490ea2ae935dd8ac89c472d4df28c7f6b87cc20767e1b21fd5ed6a16e7f61e4" + [[package]] name = "memchr" version = "2.7.5" diff --git a/Cargo.toml b/Cargo.toml index 7c3f6d0c..bcd00931 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -48,6 +48,7 @@ jsonwebtoken = { version = "9.3.1", default-features = false } lazy_static = "1.5.0" lh_eth2_keystore = { package = "eth2_keystore", git = "https://github.com/sigp/lighthouse", tag = "v7.1.0" } lh_types = { package = "types", git = "https://github.com/sigp/lighthouse", tag = "v7.1.0" } +mediatype = "0.20.0" parking_lot = "0.12.3" pbkdf2 = "0.12.2" prometheus = "0.13.4" diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index e46e1982..61da579d 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -25,6 +25,7 @@ futures.workspace = true jsonwebtoken.workspace = true lh_eth2_keystore.workspace = true lh_types.workspace = true +mediatype.workspace = true pbkdf2.workspace = true rand.workspace = true rayon.workspace = true diff --git a/crates/common/src/signer/store.rs b/crates/common/src/signer/store.rs index 09b9b91b..7cc0fc17 100644 --- a/crates/common/src/signer/store.rs +++ b/crates/common/src/signer/store.rs @@ -709,7 +709,7 @@ mod test { .join(consensus_signer.pubkey().to_string()) .join("TEST_MODULE") .join("bls") - .join(format!("{}.sig", proxy_signer.pubkey().to_string())) + .join(format!("{}.sig", proxy_signer.pubkey())) ) .unwrap() ) diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 5d8c4cfd..56719181 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -1,16 +1,27 @@ #[cfg(test)] use std::cell::Cell; use std::{ + fmt, net::Ipv4Addr, + str::FromStr, time::{SystemTime, UNIX_EPOCH}, }; use alloy::{hex, primitives::U256}; -use axum::http::HeaderValue; +use axum::{ + extract::{FromRequest, Request}, + http::HeaderValue, + response::{IntoResponse, Response as AxumResponse}, +}; +use bytes::Bytes; use futures::StreamExt; use lh_types::test_utils::{SeedableRng, TestRandom, XorShiftRng}; +use mediatype::{MediaType, MediaTypeList, names}; use rand::{Rng, distr::Alphanumeric}; -use reqwest::{Response, header::HeaderMap}; +use reqwest::{ + Response, StatusCode, + header::{ACCEPT, CONTENT_TYPE, HeaderMap}, +}; use serde::{Serialize, de::DeserializeOwned}; use serde_json::Value; use ssz::{Decode, Encode}; @@ -31,6 +42,7 @@ use crate::{ }; const MILLIS_PER_SECOND: u64 = 1_000; +pub const CONSENSUS_VERSION_HEADER: &str = "Eth-Consensus-Version"; #[derive(Debug, Error)] pub enum ResponseReadError { @@ -408,6 +420,189 @@ pub fn get_user_agent_with_version(req_headers: &HeaderMap) -> eyre::Result Accept { + Accept::from_str( + req_headers.get(ACCEPT).and_then(|value| value.to_str().ok()).unwrap_or("application/json"), + ) + .unwrap_or(Accept::Json) +} + +/// Parse CONTENT TYPE header, default to JSON if missing or mal-formatted +pub fn get_content_type_header(req_headers: &HeaderMap) -> ContentType { + ContentType::from_str( + req_headers + .get(CONTENT_TYPE) + .and_then(|value| value.to_str().ok()) + .unwrap_or("application/json"), + ) + .unwrap_or(ContentType::Json) +} + +/// Parse CONSENSUS_VERSION header +pub fn get_consensus_version_header(req_headers: &HeaderMap) -> Option { + ForkName::from_str( + req_headers + 
.get(CONSENSUS_VERSION_HEADER) + .and_then(|value| value.to_str().ok()) + .unwrap_or(""), + ) + .ok() +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum ForkName { + Electra, +} + +impl std::fmt::Display for ForkName { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ForkName::Electra => write!(f, "electra"), + } + } +} + +impl FromStr for ForkName { + type Err = String; + fn from_str(value: &str) -> Result { + match value { + "electra" => Ok(ForkName::Electra), + _ => Err(format!("Invalid fork name {}", value)), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum ContentType { + Json, + Ssz, +} + +impl std::fmt::Display for ContentType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ContentType::Json => write!(f, "application/json"), + ContentType::Ssz => write!(f, "application/octet-stream"), + } + } +} + +impl FromStr for ContentType { + type Err = String; + fn from_str(value: &str) -> Result { + match value { + "application/json" => Ok(ContentType::Json), + "application/octet-stream" => Ok(ContentType::Ssz), + _ => Ok(ContentType::Json), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum Accept { + Json, + Ssz, + Any, +} + +impl fmt::Display for Accept { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Accept::Ssz => write!(f, "application/octet-stream"), + Accept::Json => write!(f, "application/json"), + Accept::Any => write!(f, "*/*"), + } + } +} + +impl FromStr for Accept { + type Err = String; + + fn from_str(s: &str) -> Result { + let media_type_list = MediaTypeList::new(s); + + // [q-factor weighting]: https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2 + // find the highest q-factor supported accept type + let mut highest_q = 0_u16; + let mut accept_type = None; + + const APPLICATION: &str = names::APPLICATION.as_str(); + const OCTET_STREAM: &str = names::OCTET_STREAM.as_str(); + const JSON: &str = names::JSON.as_str(); + const STAR: &str = names::_STAR.as_str(); + const Q: &str = names::Q.as_str(); + + media_type_list.into_iter().for_each(|item| { + if let Ok(MediaType { ty, subty, suffix: _, params }) = item { + let q_accept = match (ty.as_str(), subty.as_str()) { + (APPLICATION, OCTET_STREAM) => Some(Accept::Ssz), + (APPLICATION, JSON) => Some(Accept::Json), + (STAR, STAR) => Some(Accept::Any), + _ => None, + } + .map(|item_accept_type| { + let q_val = params + .iter() + .find_map(|(n, v)| match n.as_str() { + Q => { + Some((v.as_str().parse::().unwrap_or(0_f32) * 1000_f32) as u16) + } + _ => None, + }) + .or(Some(1000_u16)); + + (q_val.unwrap(), item_accept_type) + }); + + match q_accept { + Some((q, accept)) if q > highest_q => { + highest_q = q; + accept_type = Some(accept); + } + _ => (), + } + } + }); + accept_type.ok_or_else(|| "accept header is not supported".to_string()) + } +} + +#[must_use] +#[derive(Debug, Clone, Copy, Default)] +pub struct JsonOrSsz(pub T); + +impl FromRequest for JsonOrSsz +where + T: serde::de::DeserializeOwned + ssz::Decode + 'static, + S: Send + Sync, +{ + type Rejection = AxumResponse; + + async fn from_request(req: Request, _state: &S) -> Result { + let headers = req.headers().clone(); + let content_type = headers.get(CONTENT_TYPE).and_then(|value| value.to_str().ok()); + + let bytes = Bytes::from_request(req, _state).await.map_err(IntoResponse::into_response)?; + + if let Some(content_type) = content_type { + if content_type.starts_with(&ContentType::Json.to_string()) { + let payload: T = 
serde_json::from_slice(&bytes) + .map_err(|_| StatusCode::BAD_REQUEST.into_response())?; + return Ok(Self(payload)); + } + + if content_type.starts_with(&ContentType::Ssz.to_string()) { + let payload = T::from_ssz_bytes(&bytes) + .map_err(|_| StatusCode::BAD_REQUEST.into_response())?; + return Ok(Self(payload)); + } + } + + Err(StatusCode::UNSUPPORTED_MEDIA_TYPE.into_response()) + } +} + #[cfg(unix)] pub async fn wait_for_signal() -> eyre::Result<()> { use tokio::signal::unix::{SignalKind, signal}; diff --git a/crates/pbs/Cargo.toml b/crates/pbs/Cargo.toml index d3aaace8..8a7b27cd 100644 --- a/crates/pbs/Cargo.toml +++ b/crates/pbs/Cargo.toml @@ -12,6 +12,7 @@ axum.workspace = true axum-extra.workspace = true cb-common.workspace = true cb-metrics.workspace = true +ethereum_ssz.workspace = true eyre.workspace = true futures.workspace = true lazy_static.workspace = true diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index 77607c28..31ba4c09 100644 --- a/crates/pbs/src/routes/get_header.rs +++ b/crates/pbs/src/routes/get_header.rs @@ -1,14 +1,15 @@ use alloy::primitives::utils::format_ether; use axum::{ extract::{Path, State}, - http::HeaderMap, + http::{HeaderMap, HeaderValue}, response::IntoResponse, }; use cb_common::{ - pbs::GetHeaderParams, - utils::{get_user_agent, ms_into_slot}, + pbs::{GetHeaderParams, VersionedResponse}, + utils::{Accept, CONSENSUS_VERSION_HEADER, get_accept_header, get_user_agent, ms_into_slot}, }; -use reqwest::StatusCode; +use reqwest::{StatusCode, header::CONTENT_TYPE}; +use ssz::Encode; use tracing::{error, info}; use crate::{ @@ -32,6 +33,7 @@ pub async fn handle_get_header>( let ua = get_user_agent(&req_headers); let ms_into_slot = ms_into_slot(params.slot, state.config.chain); + let accept_header = get_accept_header(&req_headers); info!(ua, ms_into_slot, "new request"); @@ -41,7 +43,35 @@ pub async fn handle_get_header>( info!(value_eth = format_ether(max_bid.value()), block_hash =% max_bid.block_hash(), "received header"); BEACON_NODE_STATUS.with_label_values(&["200", GET_HEADER_ENDPOINT_TAG]).inc(); - Ok((StatusCode::OK, axum::Json(max_bid)).into_response()) + let response = match accept_header { + Accept::Ssz => { + let mut res = match &max_bid { + VersionedResponse::Electra(max_bid) => { + (StatusCode::OK, max_bid.as_ssz_bytes()).into_response() + } + }; + let Ok(consensus_version_header) = HeaderValue::from_str(max_bid.version()) + else { + info!("sending response as JSON"); + return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()); + }; + let Ok(content_type_header) = + HeaderValue::from_str(&format!("{}", Accept::Ssz)) + else { + info!("sending response as JSON"); + return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()); + }; + res.headers_mut() + .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + res.headers_mut().insert(CONTENT_TYPE, content_type_header); + info!("sending response as SSZ"); + res + } + Accept::Json | Accept::Any => { + (StatusCode::OK, axum::Json(max_bid)).into_response() + } + }; + Ok(response) } else { // spec: return 204 if request is valid but no bid available info!("no header available for slot"); diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index 27d2c798..69bc588d 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -1,9 +1,18 @@ -use axum::{Json, extract::State, http::HeaderMap, response::IntoResponse}; +use axum::{ + Json, + extract::State, + http::{HeaderMap, 
HeaderValue}, + response::IntoResponse, +}; use cb_common::{ - pbs::{BuilderApiVersion, SignedBlindedBeaconBlock}, - utils::{get_user_agent, timestamp_of_slot_start_millis, utcnow_ms}, + pbs::{BuilderApiVersion, SignedBlindedBeaconBlock, VersionedResponse}, + utils::{ + CONSENSUS_VERSION_HEADER, ContentType, JsonOrSsz, get_accept_header, get_user_agent, + timestamp_of_slot_start_millis, utcnow_ms, + }, }; -use reqwest::StatusCode; +use reqwest::{StatusCode, header::CONTENT_TYPE}; +use ssz::Encode; use tracing::{error, info, trace}; use crate::{ @@ -17,7 +26,7 @@ use crate::{ pub async fn handle_submit_block_v1>( state: State>, req_headers: HeaderMap, - signed_blinded_block: Json, + signed_blinded_block: JsonOrSsz, ) -> Result { handle_submit_block_impl::( state, @@ -31,7 +40,7 @@ pub async fn handle_submit_block_v1>( pub async fn handle_submit_block_v2>( state: State>, req_headers: HeaderMap, - signed_blinded_block: Json, + signed_blinded_block: JsonOrSsz, ) -> Result { handle_submit_block_impl::( state, @@ -45,7 +54,7 @@ pub async fn handle_submit_block_v2>( async fn handle_submit_block_impl>( State(state): State>, req_headers: HeaderMap, - Json(signed_blinded_block): Json, + JsonOrSsz(signed_blinded_block): JsonOrSsz, api_version: BuilderApiVersion, ) -> Result { tracing::Span::current().record("slot", signed_blinded_block.slot()); @@ -62,23 +71,64 @@ async fn handle_submit_block_impl>( let block_hash = signed_blinded_block.block_hash(); let slot_start_ms = timestamp_of_slot_start_millis(slot, state.config.chain); let ua = get_user_agent(&req_headers); + let accept_header = get_accept_header(&req_headers); info!(ua, ms_into_slot = now.saturating_sub(slot_start_ms), "new request"); match A::submit_block(signed_blinded_block, req_headers, state.clone(), &api_version).await { Ok(res) => match res { - Some(block_response) => { - trace!(?block_response); + Some(payload_and_blobs) => { + trace!(?payload_and_blobs); info!("received unblinded block (v1)"); BEACON_NODE_STATUS .with_label_values(&["200", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) .inc(); - Ok((StatusCode::OK, Json(block_response).into_response())) + let response = match accept_header { + cb_common::utils::Accept::Json | cb_common::utils::Accept::Any => { + info!("sending response as JSON"); + Json(payload_and_blobs).into_response() + } + cb_common::utils::Accept::Ssz => { + let mut response = match &payload_and_blobs { + VersionedResponse::Electra(payload_and_blobs) => { + payload_and_blobs.as_ssz_bytes().into_response() + } + }; + let Ok(consensus_version_header) = + HeaderValue::from_str(payload_and_blobs.version()) + else { + info!("sending response as JSON"); + return Ok(( + StatusCode::OK, + axum::Json(payload_and_blobs).into_response(), + )); + }; + let Ok(content_type_header) = + HeaderValue::from_str(&ContentType::Ssz.to_string()) + else { + info!("sending response as JSON"); + return Ok(( + StatusCode::OK, + axum::Json(payload_and_blobs).into_response(), + )); + }; + response + .headers_mut() + .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + info!("sending response as SSZ"); + response + } + }; + + Ok((StatusCode::OK, response)) } None => { info!("received unblinded block (v2)"); + // Note: this doesn't provide consensus_version_header because it doesn't pass + // the body through, and there's no content-type header since the body is empty. 
BEACON_NODE_STATUS .with_label_values(&["202", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) .inc(); diff --git a/tests/Cargo.toml b/tests/Cargo.toml index f1b5c9d9..7dae4bae 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -10,6 +10,7 @@ axum.workspace = true cb-common.workspace = true cb-pbs.workspace = true cb-signer.workspace = true +ethereum_ssz.workspace = true eyre.workspace = true reqwest.workspace = true serde_json.workspace = true diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index 61c3e84d..67c4f319 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -10,7 +10,7 @@ use alloy::{primitives::U256, rpc::types::beacon::relay::ValidatorRegistration}; use axum::{ Json, Router, extract::{Path, State}, - http::StatusCode, + http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Response}, routing::{get, post}, }; @@ -24,9 +24,14 @@ use cb_common::{ }, signature::sign_builder_root, types::{BlsSecretKey, Chain}, - utils::timestamp_of_slot_start_sec, + utils::{ + Accept, CONSENSUS_VERSION_HEADER, ForkName, JsonOrSsz, get_accept_header, + get_consensus_version_header, timestamp_of_slot_start_sec, + }, }; use cb_pbs::MAX_SIZE_SUBMIT_BLOCK_RESPONSE; +use reqwest::header::CONTENT_TYPE; +use ssz::Encode; use tokio::net::TcpListener; use tracing::debug; use tree_hash::TreeHash; @@ -109,29 +114,49 @@ pub fn mock_relay_app_router(state: Arc) -> Router { async fn handle_get_header( State(state): State>, Path(GetHeaderParams { parent_hash, .. }): Path, + headers: HeaderMap, ) -> Response { state.received_get_header.fetch_add(1, Ordering::Relaxed); - - let mut message = ExecutionPayloadHeaderMessageElectra { - header: Default::default(), - blob_kzg_commitments: Default::default(), - execution_requests: ExecutionRequests::default(), - value: Default::default(), - pubkey: state.signer.public_key(), + let accept_header = get_accept_header(&headers); + let consensus_version_header = + get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); + + let data = match consensus_version_header { + // Add Fusaka and other forks here when necessary + ForkName::Electra => { + let mut message = ExecutionPayloadHeaderMessageElectra { + header: Default::default(), + blob_kzg_commitments: Default::default(), + execution_requests: ExecutionRequests::default(), + value: Default::default(), + pubkey: state.signer.public_key(), + }; + message.header.parent_hash = parent_hash; + message.header.block_hash.0[0] = 1; + message.value = U256::from(10); + message.pubkey = state.signer.public_key(); + message.header.timestamp = timestamp_of_slot_start_sec(0, state.chain); + + let object_root = message.tree_hash_root(); + let signature = sign_builder_root(state.chain, &state.signer, object_root); + let response = SignedExecutionPayloadHeader { message, signature }; + match accept_header { + Accept::Json | Accept::Any => { + let versioned_response = GetHeaderResponse::Electra(response); + serde_json::to_vec(&versioned_response).unwrap() + } + Accept::Ssz => response.as_ssz_bytes(), + } + } }; - message.header.parent_hash = parent_hash; - message.header.block_hash.0[0] = 1; - message.value = U256::from(10); - message.pubkey = state.signer.public_key(); - message.header.timestamp = timestamp_of_slot_start_sec(0, state.chain); - - let object_root = message.tree_hash_root(); - let signature = sign_builder_root(state.chain, &state.signer, object_root); - let response = SignedExecutionPayloadHeader { message, signature }; - - let response = GetHeaderResponse::Electra(response); - 
(StatusCode::OK, Json(response)).into_response() + let mut response = (StatusCode::OK, data).into_response(); + let consensus_version_header = + HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); + let content_type_header = HeaderValue::from_str(&accept_header.to_string()).unwrap(); + response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + response } async fn handle_get_status(State(state): State>) -> impl IntoResponse { @@ -154,12 +179,17 @@ async fn handle_register_validator( } async fn handle_submit_block_v1( + headers: HeaderMap, State(state): State>, - Json(submit_block): Json, + JsonOrSsz(submit_block): JsonOrSsz, ) -> Response { state.received_submit_block.fetch_add(1, Ordering::Relaxed); - if state.large_body() { - (StatusCode::OK, Json(vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE])).into_response() + let accept_header = get_accept_header(&headers); + let consensus_version_header = + get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); + + let data = if state.large_body() { + vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE] } else { let VersionedResponse::Electra(mut response) = SubmitBlindedBlockResponse::default(); response.execution_payload.block_hash = submit_block.block_hash(); @@ -170,11 +200,23 @@ async fn handle_submit_block_v1( response.blobs_bundle.commitments = body.body.blob_kzg_commitments; response.blobs_bundle.proofs.push(KzgProof([0; 48])).unwrap(); - let response = VersionedResponse::Electra(response); + match accept_header { + Accept::Json | Accept::Any => serde_json::to_vec(&response).unwrap(), + Accept::Ssz => match consensus_version_header { + ForkName::Electra => response.as_ssz_bytes(), + }, + } + }; - (StatusCode::OK, Json(response)).into_response() - } + let mut response = (StatusCode::OK, data).into_response(); + let consensus_version_header = + HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); + let content_type_header = HeaderValue::from_str(&accept_header.to_string()).unwrap(); + response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + response } + async fn handle_submit_block_v2(State(state): State>) -> Response { state.received_submit_block.fetch_add(1, Ordering::Relaxed); (StatusCode::ACCEPTED, "").into_response() diff --git a/tests/src/mock_validator.rs b/tests/src/mock_validator.rs index ab593277..d92b0b24 100644 --- a/tests/src/mock_validator.rs +++ b/tests/src/mock_validator.rs @@ -2,9 +2,13 @@ use alloy::{primitives::B256, rpc::types::beacon::relay::ValidatorRegistration}; use cb_common::{ pbs::{BuilderApiVersion, RelayClient, SignedBlindedBeaconBlock}, types::BlsPublicKey, - utils::bls_pubkey_from_hex, + utils::{Accept, CONSENSUS_VERSION_HEADER, ContentType, ForkName, bls_pubkey_from_hex}, }; -use reqwest::Response; +use reqwest::{ + Response, + header::{ACCEPT, CONTENT_TYPE}, +}; +use ssz::Encode; use crate::utils::generate_mock_relay; @@ -20,13 +24,26 @@ impl MockValidator { Ok(Self { comm_boost: generate_mock_relay(port, pubkey)? 
}) } - pub async fn do_get_header(&self, pubkey: Option) -> eyre::Result { + pub async fn do_get_header( + &self, + pubkey: Option, + accept: Option, + fork_name: ForkName, + ) -> eyre::Result { let default_pubkey = bls_pubkey_from_hex( "0xac6e77dfe25ecd6110b8e780608cce0dab71fdd5ebea22a16c0205200f2f8e2e3ad3b71d3499c54ad14d6c21b41a37ae", )?; let url = self.comm_boost.get_header_url(0, &B256::ZERO, &pubkey.unwrap_or(default_pubkey))?; - Ok(self.comm_boost.client.get(url).send().await?) + let res = self + .comm_boost + .client + .get(url) + .header(ACCEPT, &accept.unwrap_or(Accept::Any).to_string()) + .header(CONSENSUS_VERSION_HEADER, &fork_name.to_string()) + .send() + .await?; + Ok(res) } pub async fn do_get_status(&self) -> eyre::Result { @@ -49,29 +66,65 @@ impl MockValidator { pub async fn do_submit_block_v1( &self, - signed_blinded_block: Option, + signed_blinded_block_opt: Option, + accept: Accept, + content_type: ContentType, + fork_name: ForkName, ) -> eyre::Result { - self.do_submit_block_impl(signed_blinded_block, BuilderApiVersion::V1).await + self.do_submit_block_impl( + signed_blinded_block_opt, + accept, + content_type, + fork_name, + BuilderApiVersion::V1, + ) + .await } pub async fn do_submit_block_v2( &self, - signed_blinded_block: Option, + signed_blinded_block_opt: Option, + accept: Accept, + content_type: ContentType, + fork_name: ForkName, ) -> eyre::Result { - self.do_submit_block_impl(signed_blinded_block, BuilderApiVersion::V2).await + self.do_submit_block_impl( + signed_blinded_block_opt, + accept, + content_type, + fork_name, + BuilderApiVersion::V2, + ) + .await } async fn do_submit_block_impl( &self, - signed_blinded_block: Option, + signed_blinded_block_opt: Option, + accept: Accept, + content_type: ContentType, + fork_name: ForkName, api_version: BuilderApiVersion, ) -> eyre::Result { let url = self.comm_boost.submit_block_url(api_version).unwrap(); let signed_blinded_block = - signed_blinded_block.unwrap_or_else(load_test_signed_blinded_block); + signed_blinded_block_opt.unwrap_or_else(load_test_signed_blinded_block); + let body = match content_type { + ContentType::Json => serde_json::to_vec(&signed_blinded_block).unwrap(), + ContentType::Ssz => signed_blinded_block.as_ssz_bytes(), + }; - Ok(self.comm_boost.client.post(url).json(&signed_blinded_block).send().await?) + Ok(self + .comm_boost + .client + .post(url) + .body(body) + .header(CONSENSUS_VERSION_HEADER, &fork_name.to_string()) + .header(CONTENT_TYPE, &content_type.to_string()) + .header(ACCEPT, &accept.to_string()) + .send() + .await?) 
} } diff --git a/tests/tests/payloads.rs b/tests/tests/payloads.rs index c43df7ef..2a631d22 100644 --- a/tests/tests/payloads.rs +++ b/tests/tests/payloads.rs @@ -30,12 +30,11 @@ fn test_missing_registration_field(field_name: &str) -> String { let mut values: Value = serde_json::from_str(data).unwrap(); // Remove specified field from the first validator's message - if let Value::Array(arr) = &mut values { - if let Some(first_validator) = arr.get_mut(0) { - if let Some(Value::Object(msg_obj)) = first_validator.get_mut("message") { - msg_obj.remove(field_name); - } - } + if let Value::Array(arr) = &mut values && + let Some(first_validator) = arr.get_mut(0) && + let Some(Value::Object(msg_obj)) = first_validator.get_mut("message") + { + msg_obj.remove(field_name); } // This should fail since the field is required diff --git a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index 66c83752..800e4ed2 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -2,7 +2,10 @@ use std::{sync::Arc, time::Duration}; use alloy::primitives::{B256, U256}; use cb_common::{ - pbs::GetHeaderResponse, signature::sign_builder_root, signer::random_secret, types::Chain, + pbs::{ExecutionPayloadHeaderMessageElectra, GetHeaderResponse, SignedExecutionPayloadHeader}, + signature::sign_builder_root, + signer::random_secret, + types::Chain, utils::timestamp_of_slot_start_sec, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; From 24fabcac7f0e4b15db036f6b69ddf87305317469 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 9 Sep 2025 15:04:38 -0400 Subject: [PATCH 05/11] Finished porting, successful tests --- tests/src/mock_relay.rs | 7 ++- tests/tests/pbs_get_header.rs | 51 +++++++++++++++++++-- tests/tests/pbs_mux.rs | 33 +++++++++++--- tests/tests/pbs_post_blinded_blocks.rs | 61 +++++++++++++++++++++++--- 4 files changed, 136 insertions(+), 16 deletions(-) diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index 67c4f319..a2d808f6 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -201,8 +201,13 @@ async fn handle_submit_block_v1( response.blobs_bundle.proofs.push(KzgProof([0; 48])).unwrap(); match accept_header { - Accept::Json | Accept::Any => serde_json::to_vec(&response).unwrap(), + Accept::Json | Accept::Any => { + // Response is versioned for JSON + let response = VersionedResponse::Electra(response); + serde_json::to_vec(&response).unwrap() + } Accept::Ssz => match consensus_version_header { + // Response isn't versioned for SSZ ForkName::Electra => response.as_ssz_bytes(), }, } diff --git a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index 800e4ed2..cb35444f 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -6,7 +6,7 @@ use cb_common::{ signature::sign_builder_root, signer::random_secret, types::Chain, - utils::timestamp_of_slot_start_sec, + utils::{Accept, ForkName, timestamp_of_slot_start_sec}, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ @@ -16,6 +16,7 @@ use cb_tests::{ }; use eyre::Result; use reqwest::StatusCode; +use ssz::Decode; use tracing::info; use tree_hash::TreeHash; @@ -44,7 +45,7 @@ async fn test_get_header() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(None).await?; + let res = mock_validator.do_get_header(None, None, ForkName::Electra).await?; assert_eq!(res.status(), StatusCode::OK); let res = serde_json::from_slice::(&res.bytes().await?)?; @@ 
-63,6 +64,50 @@ async fn test_get_header() -> Result<()> { Ok(()) } +#[tokio::test] +async fn test_get_header_ssz() -> Result<()> { + setup_test_env(); + let signer = random_secret(); + let pubkey = signer.public_key(); + + let chain = Chain::Holesky; + let pbs_port = 3210; + let relay_port = pbs_port + 1; + + // Run a mock relay + let mock_state = Arc::new(MockRelayState::new(chain, signer)); + let mock_relay = generate_mock_relay(relay_port, pubkey)?; + tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); + + // Run the PBS service + let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![mock_relay.clone()]); + let state = PbsState::new(config); + tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); + + // leave some time to start servers + tokio::time::sleep(Duration::from_millis(100)).await; + + let mock_validator = MockValidator::new(pbs_port)?; + info!("Sending get header"); + let res = mock_validator.do_get_header(None, Some(Accept::Ssz), ForkName::Electra).await?; + assert_eq!(res.status(), StatusCode::OK); + + let res: SignedExecutionPayloadHeader = + SignedExecutionPayloadHeader::from_ssz_bytes(&res.bytes().await?).unwrap(); + + assert_eq!(mock_state.received_get_header(), 1); + assert_eq!(res.message.header.block_hash.0[0], 1); + assert_eq!(res.message.header.parent_hash, B256::ZERO); + assert_eq!(res.message.value, U256::from(10)); + assert_eq!(res.message.pubkey, mock_state.signer.public_key()); + assert_eq!(res.message.header.timestamp, timestamp_of_slot_start_sec(0, chain)); + assert_eq!( + res.signature, + sign_builder_root(chain, &mock_state.signer, res.message.tree_hash_root()) + ); + Ok(()) +} + #[tokio::test] async fn test_get_header_returns_204_if_relay_down() -> Result<()> { setup_test_env(); @@ -90,7 +135,7 @@ async fn test_get_header_returns_204_if_relay_down() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(None).await?; + let res = mock_validator.do_get_header(None, None, ForkName::Electra).await?; assert_eq!(res.status(), StatusCode::NO_CONTENT); // 204 error assert_eq!(mock_state.received_get_header(), 0); // no header received diff --git a/tests/tests/pbs_mux.rs b/tests/tests/pbs_mux.rs index 28a3d369..c0a52c34 100644 --- a/tests/tests/pbs_mux.rs +++ b/tests/tests/pbs_mux.rs @@ -1,6 +1,11 @@ use std::{collections::HashMap, sync::Arc, time::Duration}; -use cb_common::{config::RuntimeMuxConfig, signer::random_secret, types::Chain}; +use cb_common::{ + config::RuntimeMuxConfig, + signer::random_secret, + types::Chain, + utils::{Accept, ContentType, ForkName}, +}; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ mock_relay::{MockRelayState, start_mock_relay_service}, @@ -56,13 +61,19 @@ async fn test_mux() -> Result<()> { // Send default request without specifying a validator key let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header with default"); - assert_eq!(mock_validator.do_get_header(None).await?.status(), StatusCode::OK); + assert_eq!( + mock_validator.do_get_header(None, None, ForkName::Electra).await?.status(), + StatusCode::OK + ); assert_eq!(mock_state.received_get_header(), 1); // only default relay was used // Send request specifying a validator key to use mux info!("Sending get header with mux"); assert_eq!( - mock_validator.do_get_header(Some(validator_pubkey)).await?.status(), + mock_validator + .do_get_header(Some(validator_pubkey), None, ForkName::Electra) + .await? 
+ .status(), StatusCode::OK ); assert_eq!(mock_state.received_get_header(), 3); // two mux relays were used @@ -79,12 +90,24 @@ async fn test_mux() -> Result<()> { // v1 Submit block requests should go to all relays info!("Sending submit block v1"); - assert_eq!(mock_validator.do_submit_block_v1(None).await?.status(), StatusCode::OK); + assert_eq!( + mock_validator + .do_submit_block_v1(None, Accept::Json, ContentType::Json, ForkName::Electra) + .await? + .status(), + StatusCode::OK + ); assert_eq!(mock_state.received_submit_block(), 3); // default + 2 mux relays were used // v2 Submit block requests should go to all relays info!("Sending submit block v2"); - assert_eq!(mock_validator.do_submit_block_v2(None).await?.status(), StatusCode::ACCEPTED); + assert_eq!( + mock_validator + .do_submit_block_v2(None, Accept::Json, ContentType::Json, ForkName::Electra) + .await? + .status(), + StatusCode::ACCEPTED + ); assert_eq!(mock_state.received_submit_block(), 6); // default + 2 mux relays were used Ok(()) diff --git a/tests/tests/pbs_post_blinded_blocks.rs b/tests/tests/pbs_post_blinded_blocks.rs index f0c47471..c17c8c29 100644 --- a/tests/tests/pbs_post_blinded_blocks.rs +++ b/tests/tests/pbs_post_blinded_blocks.rs @@ -1,9 +1,10 @@ use std::{sync::Arc, time::Duration}; use cb_common::{ - pbs::{BuilderApiVersion, SubmitBlindedBlockResponse}, + pbs::{BuilderApiVersion, PayloadAndBlobsElectra, SubmitBlindedBlockResponse}, signer::random_secret, types::Chain, + utils::{Accept, ContentType, ForkName}, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ @@ -13,11 +14,12 @@ use cb_tests::{ }; use eyre::Result; use reqwest::{Response, StatusCode}; +use ssz::Decode; use tracing::info; #[tokio::test] async fn test_submit_block_v1() -> Result<()> { - let res = submit_block_impl(3800, &BuilderApiVersion::V1).await?; + let res = submit_block_impl(3800, BuilderApiVersion::V1, ContentType::Json).await?; assert_eq!(res.status(), StatusCode::OK); let signed_blinded_block = load_test_signed_blinded_block(); @@ -29,7 +31,27 @@ async fn test_submit_block_v1() -> Result<()> { #[tokio::test] async fn test_submit_block_v2() -> Result<()> { - let res = submit_block_impl(3850, &BuilderApiVersion::V2).await?; + let res = submit_block_impl(3850, BuilderApiVersion::V2, ContentType::Json).await?; + assert_eq!(res.status(), StatusCode::ACCEPTED); + assert_eq!(res.bytes().await?.len(), 0); + Ok(()) +} + +#[tokio::test] +async fn test_submit_block_v1_ssz() -> Result<()> { + let res = submit_block_impl(3810, BuilderApiVersion::V1, ContentType::Ssz).await?; + assert_eq!(res.status(), StatusCode::OK); + + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = PayloadAndBlobsElectra::from_ssz_bytes(&res.bytes().await?).unwrap(); + assert_eq!(response_body.block_hash(), signed_blinded_block.block_hash()); + Ok(()) +} + +#[tokio::test] +async fn test_submit_block_v2_ssz() -> Result<()> { + let res = submit_block_impl(3860, BuilderApiVersion::V2, ContentType::Ssz).await?; assert_eq!(res.status(), StatusCode::ACCEPTED); assert_eq!(res.bytes().await?.len(), 0); Ok(()) @@ -57,7 +79,9 @@ async fn test_submit_block_too_large() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending submit block"); - let res = mock_validator.do_submit_block_v1(None).await; + let res = mock_validator + .do_submit_block_v1(None, Accept::Json, ContentType::Json, ForkName::Electra) + .await; // response size exceeds max size: max: 20971520 assert_eq!(res.unwrap().status(), 
StatusCode::BAD_GATEWAY); @@ -65,7 +89,16 @@ async fn test_submit_block_too_large() -> Result<()> { Ok(()) } -async fn submit_block_impl(pbs_port: u16, api_version: &BuilderApiVersion) -> Result { +async fn submit_block_impl( + pbs_port: u16, + api_version: BuilderApiVersion, + serialization_mode: ContentType, +) -> Result { + let accept = match serialization_mode { + ContentType::Json => Accept::Json, + ContentType::Ssz => Accept::Ssz, + }; + setup_test_env(); let signer = random_secret(); let pubkey = signer.public_key(); @@ -90,10 +123,24 @@ async fn submit_block_impl(pbs_port: u16, api_version: &BuilderApiVersion) -> Re info!("Sending submit block"); let res = match api_version { BuilderApiVersion::V1 => { - mock_validator.do_submit_block_v1(Some(signed_blinded_block)).await? + mock_validator + .do_submit_block_v1( + Some(signed_blinded_block), + accept, + serialization_mode, + ForkName::Electra, + ) + .await? } BuilderApiVersion::V2 => { - mock_validator.do_submit_block_v2(Some(signed_blinded_block)).await? + mock_validator + .do_submit_block_v2( + Some(signed_blinded_block), + accept, + serialization_mode, + ForkName::Electra, + ) + .await? } }; assert_eq!(mock_state.received_submit_block(), 1); From bad567502f10d1df978476cc447a9503998e1802 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 9 Sep 2025 15:31:02 -0400 Subject: [PATCH 06/11] Cleaned up merge --- crates/cli/src/docker_init.rs | 7 +- crates/common/src/signer/store.rs | 8 +- crates/common/src/types.rs | 13 +- crates/common/src/utils.rs | 186 ----------------------- crates/pbs/src/mev_boost/submit_block.rs | 6 +- tests/src/mock_validator.rs | 7 - tests/tests/pbs_get_header.rs | 44 ------ tests/tests/pbs_post_blinded_blocks.rs | 41 ----- 8 files changed, 15 insertions(+), 297 deletions(-) diff --git a/crates/cli/src/docker_init.rs b/crates/cli/src/docker_init.rs index a16c6396..b535e54d 100644 --- a/crates/cli/src/docker_init.rs +++ b/crates/cli/src/docker_init.rs @@ -154,10 +154,9 @@ pub async fn handle_docker_init(config_path: PathBuf, output_dir: PathBuf) -> Re // depends_on let mut module_dependencies = IndexMap::new(); - module_dependencies.insert( - "cb_signer".into(), - DependsCondition { condition: "service_healthy".into() }, - ); + module_dependencies.insert("cb_signer".into(), DependsCondition { + condition: "service_healthy".into(), + }); Service { container_name: Some(module_cid.clone()), diff --git a/crates/common/src/signer/store.rs b/crates/common/src/signer/store.rs index b868026a..7cc0fc17 100644 --- a/crates/common/src/signer/store.rs +++ b/crates/common/src/signer/store.rs @@ -302,8 +302,8 @@ impl ProxyStore { let entry = entry?; let path = entry.path(); - if !path.is_file() - || path.extension().is_none_or(|ext| ext != "json") + if !path.is_file() || + path.extension().is_none_or(|ext| ext != "json") { continue; } @@ -376,8 +376,8 @@ impl ProxyStore { let entry = entry?; let path = entry.path(); - if !path.is_file() - || path.extension().is_none_or(|ext| ext != "json") + if !path.is_file() || + path.extension().is_none_or(|ext| ext != "json") { continue; } diff --git a/crates/common/src/types.rs b/crates/common/src/types.rs index 141fa270..17775ba0 100644 --- a/crates/common/src/types.rs +++ b/crates/common/src/types.rs @@ -358,14 +358,11 @@ mod tests { fn test_load_custom() { let s = r#"chain = { genesis_time_secs = 1, slot_time_secs = 2, genesis_fork_version = "0x01000000" }"#; let decoded: MockConfig = toml::from_str(s).unwrap(); - assert_eq!( - decoded.chain, - Chain::Custom { - genesis_time_secs: 
1, - slot_time_secs: 2, - genesis_fork_version: [1, 0, 0, 0] - } - ) + assert_eq!(decoded.chain, Chain::Custom { + genesis_time_secs: 1, + slot_time_secs: 2, + genesis_fork_version: [1, 0, 0, 0] + }) } #[test] diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index f4281f9a..56719181 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -603,192 +603,6 @@ where } } -/// Parse ACCEPT header, default to JSON if missing or mal-formatted -pub fn get_accept_header(req_headers: &HeaderMap) -> Accept { - Accept::from_str( - req_headers.get(ACCEPT).and_then(|value| value.to_str().ok()).unwrap_or("application/json"), - ) - .unwrap_or(Accept::Json) -} - -/// Parse CONTENT TYPE header, default to JSON if missing or mal-formatted -pub fn get_content_type_header(req_headers: &HeaderMap) -> ContentType { - ContentType::from_str( - req_headers - .get(CONTENT_TYPE) - .and_then(|value| value.to_str().ok()) - .unwrap_or("application/json"), - ) - .unwrap_or(ContentType::Json) -} - -/// Parse CONSENSUS_VERSION header -pub fn get_consensus_version_header(req_headers: &HeaderMap) -> Option { - ForkName::from_str( - req_headers - .get(CONSENSUS_VERSION_HEADER) - .and_then(|value| value.to_str().ok()) - .unwrap_or(""), - ) - .ok() -} - -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum ForkName { - Deneb, - Electra, -} - -impl std::fmt::Display for ForkName { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - ForkName::Deneb => write!(f, "deneb"), - ForkName::Electra => write!(f, "electra"), - } - } -} - -impl FromStr for ForkName { - type Err = String; - fn from_str(value: &str) -> Result { - match value { - "deneb" => Ok(ForkName::Deneb), - "electra" => Ok(ForkName::Electra), - _ => Err(format!("Invalid fork name {}", value)), - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum ContentType { - Json, - Ssz, -} - -impl std::fmt::Display for ContentType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - ContentType::Json => write!(f, "application/json"), - ContentType::Ssz => write!(f, "application/octet-stream"), - } - } -} - -impl FromStr for ContentType { - type Err = String; - fn from_str(value: &str) -> Result { - match value { - "application/json" => Ok(ContentType::Json), - "application/octet-stream" => Ok(ContentType::Ssz), - _ => Ok(ContentType::Json), - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum Accept { - Json, - Ssz, - Any, -} - -impl fmt::Display for Accept { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Accept::Ssz => write!(f, "application/octet-stream"), - Accept::Json => write!(f, "application/json"), - Accept::Any => write!(f, "*/*"), - } - } -} - -impl FromStr for Accept { - type Err = String; - - fn from_str(s: &str) -> Result { - let media_type_list = MediaTypeList::new(s); - - // [q-factor weighting]: https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2 - // find the highest q-factor supported accept type - let mut highest_q = 0_u16; - let mut accept_type = None; - - const APPLICATION: &str = names::APPLICATION.as_str(); - const OCTET_STREAM: &str = names::OCTET_STREAM.as_str(); - const JSON: &str = names::JSON.as_str(); - const STAR: &str = names::_STAR.as_str(); - const Q: &str = names::Q.as_str(); - - media_type_list.into_iter().for_each(|item| { - if let Ok(MediaType { ty, subty, suffix: _, params }) = item { - let q_accept = match (ty.as_str(), subty.as_str()) { - (APPLICATION, OCTET_STREAM) => 
Some(Accept::Ssz), - (APPLICATION, JSON) => Some(Accept::Json), - (STAR, STAR) => Some(Accept::Any), - _ => None, - } - .map(|item_accept_type| { - let q_val = params - .iter() - .find_map(|(n, v)| match n.as_str() { - Q => { - Some((v.as_str().parse::().unwrap_or(0_f32) * 1000_f32) as u16) - } - _ => None, - }) - .or(Some(1000_u16)); - - (q_val.unwrap(), item_accept_type) - }); - - match q_accept { - Some((q, accept)) if q > highest_q => { - highest_q = q; - accept_type = Some(accept); - } - _ => (), - } - } - }); - accept_type.ok_or_else(|| "accept header is not supported".to_string()) - } -} - -#[must_use] -#[derive(Debug, Clone, Copy, Default)] -pub struct JsonOrSsz(pub T); - -impl FromRequest for JsonOrSsz -where - T: serde::de::DeserializeOwned + ssz::Decode + 'static, - S: Send + Sync, -{ - type Rejection = Response; - - async fn from_request(req: Request, _state: &S) -> Result { - let headers = req.headers().clone(); - let content_type = headers.get(CONTENT_TYPE).and_then(|value| value.to_str().ok()); - - let bytes = Bytes::from_request(req, _state).await.map_err(IntoResponse::into_response)?; - - if let Some(content_type) = content_type { - if content_type.starts_with(&ContentType::Json.to_string()) { - let payload: T = serde_json::from_slice(&bytes) - .map_err(|_| StatusCode::BAD_REQUEST.into_response())?; - return Ok(Self(payload)); - } - - if content_type.starts_with(&ContentType::Ssz.to_string()) { - let payload = T::from_ssz_bytes(&bytes) - .map_err(|_| StatusCode::BAD_REQUEST.into_response())?; - return Ok(Self(payload)); - } - } - - Err(StatusCode::UNSUPPORTED_MEDIA_TYPE.into_response()) - } -} - #[cfg(unix)] pub async fn wait_for_signal() -> eyre::Result<()> { use tokio::signal::unix::{SignalKind, signal}; diff --git a/crates/pbs/src/mev_boost/submit_block.rs b/crates/pbs/src/mev_boost/submit_block.rs index 1653e66e..0336bde8 100644 --- a/crates/pbs/src/mev_boost/submit_block.rs +++ b/crates/pbs/src/mev_boost/submit_block.rs @@ -208,9 +208,9 @@ fn validate_unblinded_block_electra( let blobs = &block_response.blobs_bundle; let expected_commitments = &signed_blinded_block.body.blob_kzg_commitments; - if expected_commitments.len() != blobs.blobs.len() - || expected_commitments.len() != blobs.commitments.len() - || expected_commitments.len() != blobs.proofs.len() + if expected_commitments.len() != blobs.blobs.len() || + expected_commitments.len() != blobs.commitments.len() || + expected_commitments.len() != blobs.proofs.len() { return Err(PbsError::Validation(ValidationError::KzgCommitments { expected_blobs: expected_commitments.len(), diff --git a/tests/src/mock_validator.rs b/tests/src/mock_validator.rs index 20e34d6f..d92b0b24 100644 --- a/tests/src/mock_validator.rs +++ b/tests/src/mock_validator.rs @@ -115,13 +115,6 @@ impl MockValidator { ContentType::Ssz => signed_blinded_block.as_ssz_bytes(), }; - let signed_blinded_block = signed_blinded_block_opt.unwrap_or_default(); - - let body = match content_type { - ContentType::Json => serde_json::to_vec(&signed_blinded_block).unwrap(), - ContentType::Ssz => signed_blinded_block.as_ssz_bytes(), - }; - Ok(self .comm_boost .client diff --git a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index 9d3014e5..cb35444f 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -108,50 +108,6 @@ async fn test_get_header_ssz() -> Result<()> { Ok(()) } -#[tokio::test] -async fn test_get_header_ssz() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = 
blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let pbs_port = 3200; - let relay_port = pbs_port + 1; - - // Run a mock relay - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - let mock_relay = generate_mock_relay(relay_port, *pubkey)?; - tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); - - // Run the PBS service - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![mock_relay.clone()]); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(pbs_port)?; - info!("Sending get header"); - let res = mock_validator.do_get_header(None, Some(Accept::Ssz), ForkName::Electra).await?; - assert_eq!(res.status(), StatusCode::OK); - - let res: SignedExecutionPayloadHeader = - SignedExecutionPayloadHeader::from_ssz_bytes(&res.bytes().await?).unwrap(); - - assert_eq!(mock_state.received_get_header(), 1); - assert_eq!(res.message.header.block_hash.0[0], 1); - assert_eq!(res.message.header.parent_hash, B256::ZERO); - assert_eq!(res.message.value, U256::from(10)); - assert_eq!(res.message.pubkey, blst_pubkey_to_alloy(&mock_state.signer.sk_to_pk())); - assert_eq!(res.message.header.timestamp, timestamp_of_slot_start_sec(0, chain)); - assert_eq!( - res.signature, - sign_builder_root(chain, &mock_state.signer, res.message.tree_hash_root().0) - ); - Ok(()) -} - #[tokio::test] async fn test_get_header_returns_204_if_relay_down() -> Result<()> { setup_test_env(); diff --git a/tests/tests/pbs_post_blinded_blocks.rs b/tests/tests/pbs_post_blinded_blocks.rs index 332105cb..c17c8c29 100644 --- a/tests/tests/pbs_post_blinded_blocks.rs +++ b/tests/tests/pbs_post_blinded_blocks.rs @@ -57,47 +57,6 @@ async fn test_submit_block_v2_ssz() -> Result<()> { Ok(()) } -#[tokio::test] -async fn test_submit_block_ssz() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk()).into(); - - let chain = Chain::Holesky; - let pbs_port = 3800; - - // Run a mock relay - let relays = vec![generate_mock_relay(pbs_port + 1, *pubkey)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), pbs_port + 1)); - - // Run the PBS service - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), relays); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(pbs_port)?; - info!("Sending submit block"); - let res = mock_validator - .do_submit_block( - Some(SignedBlindedBeaconBlock::default()), - Accept::Ssz, - ContentType::Ssz, - ForkName::Electra, - ) - .await?; - - assert_eq!(res.status(), StatusCode::OK); - assert_eq!(mock_state.received_submit_block(), 1); - - let response_body = PayloadAndBlobsElectra::from_ssz_bytes(&res.bytes().await?).unwrap(); - assert_eq!(response_body.block_hash(), SubmitBlindedBlockResponse::default().block_hash()); - Ok(()) -} - #[tokio::test] async fn test_submit_block_too_large() -> Result<()> { setup_test_env(); From 50fccb5a189ead725e5404bd9029a56b41ec291b Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 9 Sep 2025 17:25:46 -0400 Subject: [PATCH 07/11] Fixed clippy --- 
crates/common/src/utils.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 56719181..09c50d7b 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -468,7 +468,7 @@ impl FromStr for ForkName { fn from_str(value: &str) -> Result { match value { "electra" => Ok(ForkName::Electra), - _ => Err(format!("Invalid fork name {}", value)), + _ => Err(format!("Invalid fork name {value}")), } } } From 86fa85854cd7ecd6b7eb5b63803dc3c18a2e2463 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 15 Sep 2025 10:19:56 -0400 Subject: [PATCH 08/11] Swapped to LH's ForkName enum --- crates/common/src/utils.rs | 24 +----------------------- tests/src/mock_relay.rs | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 23 deletions(-) diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 09c50d7b..ac01890c 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -15,6 +15,7 @@ use axum::{ }; use bytes::Bytes; use futures::StreamExt; +pub use lh_types::ForkName; use lh_types::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use mediatype::{MediaType, MediaTypeList, names}; use rand::{Rng, distr::Alphanumeric}; @@ -450,29 +451,6 @@ pub fn get_consensus_version_header(req_headers: &HeaderMap) -> Option .ok() } -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum ForkName { - Electra, -} - -impl std::fmt::Display for ForkName { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - ForkName::Electra => write!(f, "electra"), - } - } -} - -impl FromStr for ForkName { - type Err = String; - fn from_str(value: &str) -> Result { - match value { - "electra" => Ok(ForkName::Electra), - _ => Err(format!("Invalid fork name {value}")), - } - } -} - #[derive(Debug, Clone, Copy, PartialEq)] pub enum ContentType { Json, diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index a2d808f6..a7a7f0f0 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -148,6 +148,13 @@ async fn handle_get_header( Accept::Ssz => response.as_ssz_bytes(), } } + _ => { + return ( + StatusCode::BAD_REQUEST, + format!("Unsupported fork {consensus_version_header}"), + ) + .into_response(); + } }; let mut response = (StatusCode::OK, data).into_response(); @@ -209,6 +216,13 @@ async fn handle_submit_block_v1( Accept::Ssz => match consensus_version_header { // Response isn't versioned for SSZ ForkName::Electra => response.as_ssz_bytes(), + _ => { + return ( + StatusCode::BAD_REQUEST, + format!("Unsupported fork {consensus_version_header}"), + ) + .into_response(); + } }, } }; From e7335f6da2a5582655e7cf0e0ce99eb70ddb209f Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 16 Sep 2025 10:58:09 -0400 Subject: [PATCH 09/11] Refactored encoding type support based on feedback --- Cargo.lock | 12 ++ Cargo.toml | 1 + crates/common/Cargo.toml | 1 + crates/common/src/utils.rs | 195 ++++++++++++------------- crates/pbs/src/error.rs | 3 + crates/pbs/src/routes/get_header.rs | 23 ++- crates/pbs/src/routes/submit_block.rs | 50 ++++--- tests/src/mock_relay.rs | 42 ++++-- tests/src/mock_validator.rs | 22 +-- tests/tests/pbs_get_header.rs | 5 +- tests/tests/pbs_mux.rs | 6 +- tests/tests/pbs_post_blinded_blocks.rs | 19 +-- 12 files changed, 208 insertions(+), 171 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 28cdac64..39299446 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1530,6 +1530,7 @@ dependencies = [ "ethereum_ssz_derive", "eyre", "futures", + 
"headers-accept", "jsonwebtoken", "mediatype", "pbkdf2 0.12.2", @@ -2957,6 +2958,17 @@ dependencies = [ "sha1", ] +[[package]] +name = "headers-accept" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479bcb872e714e11f72fcc6a71afadbc86d0dbe887bc44252b04cfbc63272897" +dependencies = [ + "headers-core", + "http", + "mediatype", +] + [[package]] name = "headers-core" version = "0.3.0" diff --git a/Cargo.toml b/Cargo.toml index bcd00931..6aebb54b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,6 +43,7 @@ ethereum_ssz_derive = "0.8" eyre = "0.6.12" futures = "0.3.30" headers = "0.4.0" +headers-accept = "0.2.1" indexmap = "2.2.6" jsonwebtoken = { version = "9.3.1", default-features = false } lazy_static = "1.5.0" diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 61da579d..7f1ac06e 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -22,6 +22,7 @@ ethereum_ssz.workspace = true ethereum_ssz_derive.workspace = true eyre.workspace = true futures.workspace = true +headers-accept.workspace = true jsonwebtoken.workspace = true lh_eth2_keystore.workspace = true lh_types.workspace = true diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index ac01890c..6882af38 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -1,7 +1,7 @@ #[cfg(test)] use std::cell::Cell; use std::{ - fmt, + fmt::Display, net::Ipv4Addr, str::FromStr, time::{SystemTime, UNIX_EPOCH}, @@ -15,12 +15,12 @@ use axum::{ }; use bytes::Bytes; use futures::StreamExt; +use headers_accept::Accept; pub use lh_types::ForkName; use lh_types::test_utils::{SeedableRng, TestRandom, XorShiftRng}; -use mediatype::{MediaType, MediaTypeList, names}; use rand::{Rng, distr::Alphanumeric}; use reqwest::{ - Response, StatusCode, + Response, header::{ACCEPT, CONTENT_TYPE, HeaderMap}, }; use serde::{Serialize, de::DeserializeOwned}; @@ -42,6 +42,10 @@ use crate::{ types::{BlsPublicKey, Chain, Jwt, JwtClaims, ModuleId}, }; +const APPLICATION_JSON: &str = "application/json"; +const APPLICATION_OCTET_STREAM: &str = "application/octet-stream"; +const WILDCARD: &str = "*/*"; + const MILLIS_PER_SECOND: u64 = 1_000; pub const CONSENSUS_VERSION_HEADER: &str = "Eth-Consensus-Version"; @@ -421,23 +425,52 @@ pub fn get_user_agent_with_version(req_headers: &HeaderMap) -> eyre::Result Accept { - Accept::from_str( - req_headers.get(ACCEPT).and_then(|value| value.to_str().ok()).unwrap_or("application/json"), +/// Parse the ACCEPT header to get the type of response to encode the body with, +/// defaulting to JSON if missing. Returns an error if malformed or unsupported +/// types are requested. Supports requests with multiple ACCEPT headers or +/// headers with multiple media types. 
+pub fn get_accept_type(req_headers: &HeaderMap) -> eyre::Result { + let accept = Accept::from_str( + req_headers.get(ACCEPT).and_then(|value| value.to_str().ok()).unwrap_or(APPLICATION_JSON), ) - .unwrap_or(Accept::Json) + .map_err(|e| eyre::eyre!("invalid accept header: {e}"))?; + + if accept.media_types().count() == 0 { + // No valid media types found, default to JSON + return Ok(EncodingType::Json); + } + + // Get the SSZ and JSON media types if present + let mut ssz_type = false; + let mut json_type = false; + let mut unsupported_type = false; + accept.media_types().for_each(|mt| match mt.essence().to_string().as_str() { + APPLICATION_OCTET_STREAM => ssz_type = true, + APPLICATION_JSON | WILDCARD => json_type = true, + _ => unsupported_type = true, + }); + + // If SSZ is present, prioritize it + if ssz_type { + return Ok(EncodingType::Ssz); + } + // If there aren't any unsupported types, use JSON + if !unsupported_type { + return Ok(EncodingType::Json); + } + Err(eyre::eyre!("unsupported accept type")) } -/// Parse CONTENT TYPE header, default to JSON if missing or mal-formatted -pub fn get_content_type_header(req_headers: &HeaderMap) -> ContentType { - ContentType::from_str( +/// Parse CONTENT TYPE header to get the encoding type of the body, defaulting +/// to JSON if missing or malformed. +pub fn get_content_type(req_headers: &HeaderMap) -> EncodingType { + EncodingType::from_str( req_headers .get(CONTENT_TYPE) .and_then(|value| value.to_str().ok()) - .unwrap_or("application/json"), + .unwrap_or(APPLICATION_JSON), ) - .unwrap_or(ContentType::Json) + .unwrap_or(EncodingType::Json) } /// Parse CONSENSUS_VERSION header @@ -451,133 +484,91 @@ pub fn get_consensus_version_header(req_headers: &HeaderMap) -> Option .ok() } +/// Enum for types that can be used to encode incoming request bodies or +/// outgoing response bodies #[derive(Debug, Clone, Copy, PartialEq)] -pub enum ContentType { +pub enum EncodingType { + /// Body is UTF-8 encoded as JSON Json, + + /// Body is raw bytes representing an SSZ object Ssz, } -impl std::fmt::Display for ContentType { +impl std::fmt::Display for EncodingType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - ContentType::Json => write!(f, "application/json"), - ContentType::Ssz => write!(f, "application/octet-stream"), + EncodingType::Json => write!(f, "application/json"), + EncodingType::Ssz => write!(f, "application/octet-stream"), } } } -impl FromStr for ContentType { +impl FromStr for EncodingType { type Err = String; fn from_str(value: &str) -> Result { match value { - "application/json" => Ok(ContentType::Json), - "application/octet-stream" => Ok(ContentType::Ssz), - _ => Ok(ContentType::Json), + "application/json" | "" => Ok(EncodingType::Json), + "application/octet-stream" => Ok(EncodingType::Ssz), + _ => Err(format!("unsupported encoding type: {value}")), } } } -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum Accept { - Json, - Ssz, - Any, +pub enum BodyDeserializeError { + SerdeJsonError(serde_json::Error), + SszDecodeError(ssz::DecodeError), + UnsupportedMediaType, } -impl fmt::Display for Accept { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl Display for BodyDeserializeError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Accept::Ssz => write!(f, "application/octet-stream"), - Accept::Json => write!(f, "application/json"), - Accept::Any => write!(f, "*/*"), + BodyDeserializeError::SerdeJsonError(e) => write!(f, "JSON deserialization error: 
{e}"), + BodyDeserializeError::SszDecodeError(e) => { + write!(f, "SSZ deserialization error: {:?}", e) + } + BodyDeserializeError::UnsupportedMediaType => write!(f, "unsupported media type"), } } } -impl FromStr for Accept { - type Err = String; - - fn from_str(s: &str) -> Result { - let media_type_list = MediaTypeList::new(s); - - // [q-factor weighting]: https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2 - // find the highest q-factor supported accept type - let mut highest_q = 0_u16; - let mut accept_type = None; - - const APPLICATION: &str = names::APPLICATION.as_str(); - const OCTET_STREAM: &str = names::OCTET_STREAM.as_str(); - const JSON: &str = names::JSON.as_str(); - const STAR: &str = names::_STAR.as_str(); - const Q: &str = names::Q.as_str(); - - media_type_list.into_iter().for_each(|item| { - if let Ok(MediaType { ty, subty, suffix: _, params }) = item { - let q_accept = match (ty.as_str(), subty.as_str()) { - (APPLICATION, OCTET_STREAM) => Some(Accept::Ssz), - (APPLICATION, JSON) => Some(Accept::Json), - (STAR, STAR) => Some(Accept::Any), - _ => None, - } - .map(|item_accept_type| { - let q_val = params - .iter() - .find_map(|(n, v)| match n.as_str() { - Q => { - Some((v.as_str().parse::().unwrap_or(0_f32) * 1000_f32) as u16) - } - _ => None, - }) - .or(Some(1000_u16)); - - (q_val.unwrap(), item_accept_type) - }); - - match q_accept { - Some((q, accept)) if q > highest_q => { - highest_q = q; - accept_type = Some(accept); - } - _ => (), - } +pub async fn deserialize_body( + headers: &HeaderMap, + body: Bytes, +) -> Result +where + T: serde::de::DeserializeOwned + ssz::Decode + 'static, +{ + if headers.contains_key(CONTENT_TYPE) { + return match get_content_type(headers) { + EncodingType::Json => { + serde_json::from_slice::(&body).map_err(BodyDeserializeError::SerdeJsonError) } - }); - accept_type.ok_or_else(|| "accept header is not supported".to_string()) + EncodingType::Ssz => { + T::from_ssz_bytes(&body).map_err(BodyDeserializeError::SszDecodeError) + } + }; } + + Err(BodyDeserializeError::UnsupportedMediaType) } #[must_use] -#[derive(Debug, Clone, Copy, Default)] -pub struct JsonOrSsz(pub T); +#[derive(Debug, Clone, Default)] +pub struct RawRequest { + pub body_bytes: Bytes, +} -impl FromRequest for JsonOrSsz +impl FromRequest for RawRequest where - T: serde::de::DeserializeOwned + ssz::Decode + 'static, S: Send + Sync, { type Rejection = AxumResponse; async fn from_request(req: Request, _state: &S) -> Result { - let headers = req.headers().clone(); - let content_type = headers.get(CONTENT_TYPE).and_then(|value| value.to_str().ok()); - let bytes = Bytes::from_request(req, _state).await.map_err(IntoResponse::into_response)?; - - if let Some(content_type) = content_type { - if content_type.starts_with(&ContentType::Json.to_string()) { - let payload: T = serde_json::from_slice(&bytes) - .map_err(|_| StatusCode::BAD_REQUEST.into_response())?; - return Ok(Self(payload)); - } - - if content_type.starts_with(&ContentType::Ssz.to_string()) { - let payload = T::from_ssz_bytes(&bytes) - .map_err(|_| StatusCode::BAD_REQUEST.into_response())?; - return Ok(Self(payload)); - } - } - - Err(StatusCode::UNSUPPORTED_MEDIA_TYPE.into_response()) + Ok(Self { body_bytes: bytes }) } } diff --git a/crates/pbs/src/error.rs b/crates/pbs/src/error.rs index 590c03d4..6c1c5c68 100644 --- a/crates/pbs/src/error.rs +++ b/crates/pbs/src/error.rs @@ -6,6 +6,7 @@ pub enum PbsClientError { NoResponse, NoPayload, Internal, + DecodeError(String), } impl PbsClientError { @@ -14,6 +15,7 @@ impl 
PbsClientError { PbsClientError::NoResponse => StatusCode::BAD_GATEWAY, PbsClientError::NoPayload => StatusCode::BAD_GATEWAY, PbsClientError::Internal => StatusCode::INTERNAL_SERVER_ERROR, + PbsClientError::DecodeError(_) => StatusCode::BAD_REQUEST, } } } @@ -24,6 +26,7 @@ impl IntoResponse for PbsClientError { PbsClientError::NoResponse => "no response from relays".to_string(), PbsClientError::NoPayload => "no payload from relays".to_string(), PbsClientError::Internal => "internal server error".to_string(), + PbsClientError::DecodeError(e) => format!("error decoding request: {e}"), }; (self.status_code(), msg).into_response() diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index 27a7951d..c7250687 100644 --- a/crates/pbs/src/routes/get_header.rs +++ b/crates/pbs/src/routes/get_header.rs @@ -6,7 +6,9 @@ use axum::{ }; use cb_common::{ pbs::{GetHeaderParams, VersionedResponse}, - utils::{Accept, CONSENSUS_VERSION_HEADER, get_accept_header, get_user_agent, ms_into_slot}, + utils::{ + CONSENSUS_VERSION_HEADER, EncodingType, get_accept_type, get_user_agent, ms_into_slot, + }, }; use reqwest::{StatusCode, header::CONTENT_TYPE}; use ssz::Encode; @@ -33,7 +35,14 @@ pub async fn handle_get_header>( let ua = get_user_agent(&req_headers); let ms_into_slot = ms_into_slot(params.slot, state.config.chain); - let accept_header = get_accept_header(&req_headers); + let accept_type = get_accept_type(&req_headers).map_err(|e| { + error!(%e, "error parsing accept header"); + PbsClientError::DecodeError(format!("error parsing accept header: {e}")) + }); + if let Err(e) = accept_type { + return Ok((StatusCode::BAD_REQUEST, e).into_response()); + } + let accept_type = accept_type.unwrap(); info!(ua, ms_into_slot, "new request"); @@ -42,8 +51,8 @@ pub async fn handle_get_header>( if let Some(max_bid) = res { info!(value_eth = format_ether(max_bid.value()), block_hash =% max_bid.block_hash(), "received header"); BEACON_NODE_STATUS.with_label_values(&["200", GET_HEADER_ENDPOINT_TAG]).inc(); - let response = match accept_header { - Accept::Ssz => { + let response = match accept_type { + EncodingType::Ssz => { let mut res = match &max_bid { VersionedResponse::Electra(max_bid) => { (StatusCode::OK, max_bid.as_ssz_bytes()).into_response() @@ -55,7 +64,7 @@ pub async fn handle_get_header>( return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()); }; let Ok(content_type_header) = - HeaderValue::from_str(&format!("{}", Accept::Ssz)) + HeaderValue::from_str(&format!("{}", EncodingType::Ssz)) else { info!("sending response as JSON"); return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()); @@ -66,9 +75,7 @@ pub async fn handle_get_header>( info!("sending response as SSZ"); res } - Accept::Json | Accept::Any => { - (StatusCode::OK, axum::Json(max_bid)).into_response() - } + EncodingType::Json => (StatusCode::OK, axum::Json(max_bid)).into_response(), }; Ok(response) } else { diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index 69bc588d..f7b41495 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -7,8 +7,8 @@ use axum::{ use cb_common::{ pbs::{BuilderApiVersion, SignedBlindedBeaconBlock, VersionedResponse}, utils::{ - CONSENSUS_VERSION_HEADER, ContentType, JsonOrSsz, get_accept_header, get_user_agent, - timestamp_of_slot_start_millis, utcnow_ms, + CONSENSUS_VERSION_HEADER, EncodingType, RawRequest, deserialize_body, get_accept_type, + get_user_agent, timestamp_of_slot_start_millis, 
utcnow_ms, }, }; use reqwest::{StatusCode, header::CONTENT_TYPE}; @@ -26,37 +26,32 @@ use crate::{ pub async fn handle_submit_block_v1>( state: State>, req_headers: HeaderMap, - signed_blinded_block: JsonOrSsz, + raw_request: RawRequest, ) -> Result { - handle_submit_block_impl::( - state, - req_headers, - signed_blinded_block, - BuilderApiVersion::V1, - ) - .await + handle_submit_block_impl::(state, req_headers, raw_request, BuilderApiVersion::V1).await } pub async fn handle_submit_block_v2>( state: State>, req_headers: HeaderMap, - signed_blinded_block: JsonOrSsz, + raw_request: RawRequest, ) -> Result { - handle_submit_block_impl::( - state, - req_headers, - signed_blinded_block, - BuilderApiVersion::V2, - ) - .await + handle_submit_block_impl::(state, req_headers, raw_request, BuilderApiVersion::V2).await } async fn handle_submit_block_impl>( State(state): State>, req_headers: HeaderMap, - JsonOrSsz(signed_blinded_block): JsonOrSsz, + raw_request: RawRequest, api_version: BuilderApiVersion, ) -> Result { + let signed_blinded_block = + deserialize_body::(&req_headers, raw_request.body_bytes) + .await + .map_err(|e| { + error!(%e, "failed to deserialize signed blinded block"); + PbsClientError::DecodeError(format!("failed to deserialize body: {e}")) + })?; tracing::Span::current().record("slot", signed_blinded_block.slot()); tracing::Span::current() .record("block_hash", tracing::field::debug(signed_blinded_block.block_hash())); @@ -71,7 +66,14 @@ async fn handle_submit_block_impl>( let block_hash = signed_blinded_block.block_hash(); let slot_start_ms = timestamp_of_slot_start_millis(slot, state.config.chain); let ua = get_user_agent(&req_headers); - let accept_header = get_accept_header(&req_headers); + let response_type = get_accept_type(&req_headers).map_err(|e| { + error!(%e, "error parsing accept header"); + PbsClientError::DecodeError(format!("error parsing accept header: {e}")) + }); + if let Err(e) = response_type { + return Ok((StatusCode::BAD_REQUEST, e.into_response())); + } + let response_type = response_type.unwrap(); info!(ua, ms_into_slot = now.saturating_sub(slot_start_ms), "new request"); @@ -84,12 +86,12 @@ async fn handle_submit_block_impl>( BEACON_NODE_STATUS .with_label_values(&["200", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) .inc(); - let response = match accept_header { - cb_common::utils::Accept::Json | cb_common::utils::Accept::Any => { + let response = match response_type { + EncodingType::Json => { info!("sending response as JSON"); Json(payload_and_blobs).into_response() } - cb_common::utils::Accept::Ssz => { + EncodingType::Ssz => { let mut response = match &payload_and_blobs { VersionedResponse::Electra(payload_and_blobs) => { payload_and_blobs.as_ssz_bytes().into_response() @@ -105,7 +107,7 @@ async fn handle_submit_block_impl>( )); }; let Ok(content_type_header) = - HeaderValue::from_str(&ContentType::Ssz.to_string()) + HeaderValue::from_str(&EncodingType::Ssz.to_string()) else { info!("sending response as JSON"); return Ok(( diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index a7a7f0f0..cdf44fce 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -25,15 +25,15 @@ use cb_common::{ signature::sign_builder_root, types::{BlsSecretKey, Chain}, utils::{ - Accept, CONSENSUS_VERSION_HEADER, ForkName, JsonOrSsz, get_accept_header, - get_consensus_version_header, timestamp_of_slot_start_sec, + CONSENSUS_VERSION_HEADER, EncodingType, ForkName, RawRequest, deserialize_body, + get_accept_type, get_consensus_version_header, 
timestamp_of_slot_start_sec, }, }; use cb_pbs::MAX_SIZE_SUBMIT_BLOCK_RESPONSE; use reqwest::header::CONTENT_TYPE; use ssz::Encode; use tokio::net::TcpListener; -use tracing::debug; +use tracing::{debug, error}; use tree_hash::TreeHash; pub async fn start_mock_relay_service(state: Arc, port: u16) -> eyre::Result<()> { @@ -117,7 +117,12 @@ async fn handle_get_header( headers: HeaderMap, ) -> Response { state.received_get_header.fetch_add(1, Ordering::Relaxed); - let accept_header = get_accept_header(&headers); + let accept_type = get_accept_type(&headers) + .map_err(|e| (StatusCode::BAD_REQUEST, format!("error parsing accept header: {e}"))); + if let Err(e) = accept_type { + return e.into_response(); + } + let accept_header = accept_type.unwrap(); let consensus_version_header = get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); @@ -141,11 +146,11 @@ async fn handle_get_header( let signature = sign_builder_root(state.chain, &state.signer, object_root); let response = SignedExecutionPayloadHeader { message, signature }; match accept_header { - Accept::Json | Accept::Any => { + EncodingType::Json => { let versioned_response = GetHeaderResponse::Electra(response); serde_json::to_vec(&versioned_response).unwrap() } - Accept::Ssz => response.as_ssz_bytes(), + EncodingType::Ssz => response.as_ssz_bytes(), } } _ => { @@ -188,10 +193,16 @@ async fn handle_register_validator( async fn handle_submit_block_v1( headers: HeaderMap, State(state): State>, - JsonOrSsz(submit_block): JsonOrSsz, + raw_request: RawRequest, ) -> Response { state.received_submit_block.fetch_add(1, Ordering::Relaxed); - let accept_header = get_accept_header(&headers); + let accept_header = get_accept_type(&headers); + if let Err(e) = accept_header { + error!(%e, "error parsing accept header"); + return (StatusCode::BAD_REQUEST, format!("error parsing accept header: {e}")) + .into_response(); + } + let accept_header = accept_header.unwrap(); let consensus_version_header = get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); @@ -199,6 +210,17 @@ async fn handle_submit_block_v1( vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE] } else { let VersionedResponse::Electra(mut response) = SubmitBlindedBlockResponse::default(); + let submit_block = + deserialize_body::(&headers, raw_request.body_bytes) + .await + .map_err(|e| { + error!(%e, "failed to deserialize signed blinded block"); + (StatusCode::BAD_REQUEST, format!("failed to deserialize body: {e}")) + }); + if let Err(e) = submit_block { + return e.into_response(); + } + let submit_block = submit_block.unwrap(); response.execution_payload.block_hash = submit_block.block_hash(); let BlindedBeaconBlock::Electra(body) = submit_block.message; @@ -208,12 +230,12 @@ async fn handle_submit_block_v1( response.blobs_bundle.proofs.push(KzgProof([0; 48])).unwrap(); match accept_header { - Accept::Json | Accept::Any => { + EncodingType::Json => { // Response is versioned for JSON let response = VersionedResponse::Electra(response); serde_json::to_vec(&response).unwrap() } - Accept::Ssz => match consensus_version_header { + EncodingType::Ssz => match consensus_version_header { // Response isn't versioned for SSZ ForkName::Electra => response.as_ssz_bytes(), _ => { diff --git a/tests/src/mock_validator.rs b/tests/src/mock_validator.rs index d92b0b24..80aed0c2 100644 --- a/tests/src/mock_validator.rs +++ b/tests/src/mock_validator.rs @@ -2,7 +2,7 @@ use alloy::{primitives::B256, rpc::types::beacon::relay::ValidatorRegistration}; use cb_common::{ 
pbs::{BuilderApiVersion, RelayClient, SignedBlindedBeaconBlock}, types::BlsPublicKey, - utils::{Accept, CONSENSUS_VERSION_HEADER, ContentType, ForkName, bls_pubkey_from_hex}, + utils::{CONSENSUS_VERSION_HEADER, EncodingType, ForkName, bls_pubkey_from_hex}, }; use reqwest::{ Response, @@ -27,7 +27,7 @@ impl MockValidator { pub async fn do_get_header( &self, pubkey: Option, - accept: Option, + accept: Option, fork_name: ForkName, ) -> eyre::Result { let default_pubkey = bls_pubkey_from_hex( @@ -39,7 +39,7 @@ impl MockValidator { .comm_boost .client .get(url) - .header(ACCEPT, &accept.unwrap_or(Accept::Any).to_string()) + .header(ACCEPT, &accept.unwrap_or(EncodingType::Json).to_string()) .header(CONSENSUS_VERSION_HEADER, &fork_name.to_string()) .send() .await?; @@ -67,8 +67,8 @@ impl MockValidator { pub async fn do_submit_block_v1( &self, signed_blinded_block_opt: Option, - accept: Accept, - content_type: ContentType, + accept: EncodingType, + content_type: EncodingType, fork_name: ForkName, ) -> eyre::Result { self.do_submit_block_impl( @@ -84,8 +84,8 @@ impl MockValidator { pub async fn do_submit_block_v2( &self, signed_blinded_block_opt: Option, - accept: Accept, - content_type: ContentType, + accept: EncodingType, + content_type: EncodingType, fork_name: ForkName, ) -> eyre::Result { self.do_submit_block_impl( @@ -101,8 +101,8 @@ impl MockValidator { async fn do_submit_block_impl( &self, signed_blinded_block_opt: Option, - accept: Accept, - content_type: ContentType, + accept: EncodingType, + content_type: EncodingType, fork_name: ForkName, api_version: BuilderApiVersion, ) -> eyre::Result { @@ -111,8 +111,8 @@ impl MockValidator { let signed_blinded_block = signed_blinded_block_opt.unwrap_or_else(load_test_signed_blinded_block); let body = match content_type { - ContentType::Json => serde_json::to_vec(&signed_blinded_block).unwrap(), - ContentType::Ssz => signed_blinded_block.as_ssz_bytes(), + EncodingType::Json => serde_json::to_vec(&signed_blinded_block).unwrap(), + EncodingType::Ssz => signed_blinded_block.as_ssz_bytes(), }; Ok(self diff --git a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index cb35444f..7dd4a372 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -6,7 +6,7 @@ use cb_common::{ signature::sign_builder_root, signer::random_secret, types::Chain, - utils::{Accept, ForkName, timestamp_of_slot_start_sec}, + utils::{EncodingType, ForkName, timestamp_of_slot_start_sec}, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ @@ -89,7 +89,8 @@ async fn test_get_header_ssz() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(None, Some(Accept::Ssz), ForkName::Electra).await?; + let res = + mock_validator.do_get_header(None, Some(EncodingType::Ssz), ForkName::Electra).await?; assert_eq!(res.status(), StatusCode::OK); let res: SignedExecutionPayloadHeader = diff --git a/tests/tests/pbs_mux.rs b/tests/tests/pbs_mux.rs index c0a52c34..3c1fd03f 100644 --- a/tests/tests/pbs_mux.rs +++ b/tests/tests/pbs_mux.rs @@ -4,7 +4,7 @@ use cb_common::{ config::RuntimeMuxConfig, signer::random_secret, types::Chain, - utils::{Accept, ContentType, ForkName}, + utils::{EncodingType, ForkName}, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ @@ -92,7 +92,7 @@ async fn test_mux() -> Result<()> { info!("Sending submit block v1"); assert_eq!( mock_validator - .do_submit_block_v1(None, Accept::Json, ContentType::Json, 
ForkName::Electra) + .do_submit_block_v1(None, EncodingType::Json, EncodingType::Json, ForkName::Electra) .await? .status(), StatusCode::OK @@ -103,7 +103,7 @@ async fn test_mux() -> Result<()> { info!("Sending submit block v2"); assert_eq!( mock_validator - .do_submit_block_v2(None, Accept::Json, ContentType::Json, ForkName::Electra) + .do_submit_block_v2(None, EncodingType::Json, EncodingType::Json, ForkName::Electra) .await? .status(), StatusCode::ACCEPTED diff --git a/tests/tests/pbs_post_blinded_blocks.rs b/tests/tests/pbs_post_blinded_blocks.rs index c17c8c29..38d1a500 100644 --- a/tests/tests/pbs_post_blinded_blocks.rs +++ b/tests/tests/pbs_post_blinded_blocks.rs @@ -4,7 +4,7 @@ use cb_common::{ pbs::{BuilderApiVersion, PayloadAndBlobsElectra, SubmitBlindedBlockResponse}, signer::random_secret, types::Chain, - utils::{Accept, ContentType, ForkName}, + utils::{EncodingType, ForkName}, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ @@ -19,7 +19,7 @@ use tracing::info; #[tokio::test] async fn test_submit_block_v1() -> Result<()> { - let res = submit_block_impl(3800, BuilderApiVersion::V1, ContentType::Json).await?; + let res = submit_block_impl(3800, BuilderApiVersion::V1, EncodingType::Json).await?; assert_eq!(res.status(), StatusCode::OK); let signed_blinded_block = load_test_signed_blinded_block(); @@ -31,7 +31,7 @@ async fn test_submit_block_v1() -> Result<()> { #[tokio::test] async fn test_submit_block_v2() -> Result<()> { - let res = submit_block_impl(3850, BuilderApiVersion::V2, ContentType::Json).await?; + let res = submit_block_impl(3850, BuilderApiVersion::V2, EncodingType::Json).await?; assert_eq!(res.status(), StatusCode::ACCEPTED); assert_eq!(res.bytes().await?.len(), 0); Ok(()) @@ -39,7 +39,7 @@ async fn test_submit_block_v2() -> Result<()> { #[tokio::test] async fn test_submit_block_v1_ssz() -> Result<()> { - let res = submit_block_impl(3810, BuilderApiVersion::V1, ContentType::Ssz).await?; + let res = submit_block_impl(3810, BuilderApiVersion::V1, EncodingType::Ssz).await?; assert_eq!(res.status(), StatusCode::OK); let signed_blinded_block = load_test_signed_blinded_block(); @@ -51,7 +51,7 @@ async fn test_submit_block_v1_ssz() -> Result<()> { #[tokio::test] async fn test_submit_block_v2_ssz() -> Result<()> { - let res = submit_block_impl(3860, BuilderApiVersion::V2, ContentType::Ssz).await?; + let res = submit_block_impl(3860, BuilderApiVersion::V2, EncodingType::Ssz).await?; assert_eq!(res.status(), StatusCode::ACCEPTED); assert_eq!(res.bytes().await?.len(), 0); Ok(()) @@ -80,7 +80,7 @@ async fn test_submit_block_too_large() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending submit block"); let res = mock_validator - .do_submit_block_v1(None, Accept::Json, ContentType::Json, ForkName::Electra) + .do_submit_block_v1(None, EncodingType::Json, EncodingType::Json, ForkName::Electra) .await; // response size exceeds max size: max: 20971520 @@ -92,12 +92,9 @@ async fn test_submit_block_too_large() -> Result<()> { async fn submit_block_impl( pbs_port: u16, api_version: BuilderApiVersion, - serialization_mode: ContentType, + serialization_mode: EncodingType, ) -> Result { - let accept = match serialization_mode { - ContentType::Json => Accept::Json, - ContentType::Ssz => Accept::Ssz, - }; + let accept = serialization_mode; setup_test_env(); let signer = random_secret(); From 8c82b84e41c853e5a0c85262d78764535ec039f2 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 16 Sep 2025 11:06:56 -0400 Subject: [PATCH 10/11] 
Fixed clippy --- crates/common/src/utils.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 6882af38..b8458c40 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -526,7 +526,7 @@ impl Display for BodyDeserializeError { match self { BodyDeserializeError::SerdeJsonError(e) => write!(f, "JSON deserialization error: {e}"), BodyDeserializeError::SszDecodeError(e) => { - write!(f, "SSZ deserialization error: {:?}", e) + write!(f, "SSZ deserialization error: {e:?}") } BodyDeserializeError::UnsupportedMediaType => write!(f, "unsupported media type"), } From de99bb9355a1a1c1c886ad57fa6b177f44a401a6 Mon Sep 17 00:00:00 2001 From: eltitanb Date: Thu, 23 Oct 2025 20:43:58 +0100 Subject: [PATCH 11/11] spawn --- crates/common/src/pbs/error.rs | 3 ++ crates/pbs/src/api.rs | 6 ++- .../pbs/src/mev_boost/register_validator.rs | 37 ++++++++------ crates/pbs/src/mev_boost/submit_block.rs | 48 +++++++++++-------- crates/pbs/src/routes/submit_block.rs | 10 ++-- 5 files changed, 63 insertions(+), 41 deletions(-) diff --git a/crates/common/src/pbs/error.rs b/crates/common/src/pbs/error.rs index fdfc45cd..77d942cd 100644 --- a/crates/common/src/pbs/error.rs +++ b/crates/common/src/pbs/error.rs @@ -25,6 +25,9 @@ pub enum PbsError { #[error("URL parsing error: {0}")] UrlParsing(#[from] url::ParseError), + + #[error("tokio join error: {0}")] + TokioJoinError(#[from] tokio::task::JoinError), } impl PbsError { diff --git a/crates/pbs/src/api.rs b/crates/pbs/src/api.rs index a3786f0b..594b7d36 100644 --- a/crates/pbs/src/api.rs +++ b/crates/pbs/src/api.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use async_trait::async_trait; use axum::{Router, http::HeaderMap}; use cb_common::pbs::{ @@ -34,10 +36,10 @@ pub trait BuilderApi: 'static { /// https://ethereum.github.io/builder-specs/#/Builder/submitBlindedBlock and /// https://ethereum.github.io/builder-specs/#/Builder/submitBlindedBlockV2 async fn submit_block( - signed_blinded_block: SignedBlindedBeaconBlock, + signed_blinded_block: Arc, req_headers: HeaderMap, state: PbsState, - api_version: &BuilderApiVersion, + api_version: BuilderApiVersion, ) -> eyre::Result> { mev_boost::submit_block(signed_blinded_block, req_headers, state, api_version).await } diff --git a/crates/pbs/src/mev_boost/register_validator.rs b/crates/pbs/src/mev_boost/register_validator.rs index 91df8996..15f68416 100644 --- a/crates/pbs/src/mev_boost/register_validator.rs +++ b/crates/pbs/src/mev_boost/register_validator.rs @@ -7,7 +7,10 @@ use cb_common::{ utils::{get_user_agent_with_version, read_chunked_body_with_max, utcnow_ms}, }; use eyre::bail; -use futures::future::{join_all, select_ok}; +use futures::{ + FutureExt, + future::{join_all, select_ok}, +}; use reqwest::header::{CONTENT_TYPE, USER_AGENT}; use tracing::{Instrument, debug, error}; use url::Url; @@ -49,32 +52,38 @@ pub async fn register_validator( for (n_regs, body) in bodies { for relay in state.all_relays().iter().cloned() { - handles.push(tokio::spawn( - send_register_validator_with_timeout( - n_regs, - body.clone(), - relay, - send_headers.clone(), - state.pbs_config().timeout_register_validator_ms, - state.pbs_config().register_validator_retry_limit, + handles.push( + tokio::spawn( + send_register_validator_with_timeout( + n_regs, + body.clone(), + relay, + send_headers.clone(), + state.pbs_config().timeout_register_validator_ms, + state.pbs_config().register_validator_retry_limit, + ) + .in_current_span(), ) - .in_current_span(), 
- )); + .map(|join_result| match join_result { + Ok(res) => res, + Err(err) => Err(PbsError::TokioJoinError(err)), + }), + ); } } if state.pbs_config().wait_all_registrations { // wait for all relays registrations to complete let results = join_all(handles).await; - if results.into_iter().any(|res| res.is_ok_and(|res| res.is_ok())) { + if results.into_iter().any(|res| res.is_ok()) { Ok(()) } else { bail!("No relay passed register_validator successfully") } } else { // return once first completes, others proceed in background - let result = select_ok(handles).await?; - match result.0 { + let result = select_ok(handles).await; + match result { Ok(_) => Ok(()), Err(_) => bail!("No relay passed register_validator successfully"), } diff --git a/crates/pbs/src/mev_boost/submit_block.rs b/crates/pbs/src/mev_boost/submit_block.rs index e8d8ea6b..2b10dcaa 100644 --- a/crates/pbs/src/mev_boost/submit_block.rs +++ b/crates/pbs/src/mev_boost/submit_block.rs @@ -1,5 +1,6 @@ use std::{ str::FromStr, + sync::Arc, time::{Duration, Instant}, }; @@ -14,7 +15,7 @@ use cb_common::{ }, utils::{get_user_agent_with_version, read_chunked_body_with_max, utcnow_ms}, }; -use futures::future::select_ok; +use futures::{FutureExt, future::select_ok}; use reqwest::header::USER_AGENT; use tracing::{debug, warn}; use url::Url; @@ -31,10 +32,10 @@ use crate::{ /// https://ethereum.github.io/builder-specs/#/Builder/submitBlindedBlockV2. Use `api_version` to /// distinguish between the two. pub async fn submit_block( - signed_blinded_block: SignedBlindedBeaconBlock, + signed_blinded_block: Arc, req_headers: HeaderMap, state: PbsState, - api_version: &BuilderApiVersion, + api_version: BuilderApiVersion, ) -> eyre::Result> { debug!(?req_headers, "received headers"); @@ -58,17 +59,22 @@ pub async fn submit_block( send_headers.insert(USER_AGENT, get_user_agent_with_version(&req_headers)?); send_headers.insert(HEADER_CONSENSUS_VERSION, consensus_version); - let relays = state.all_relays(); - let mut handles = Vec::with_capacity(relays.len()); - for relay in relays.iter() { - handles.push(Box::pin(submit_block_with_timeout( - &signed_blinded_block, - relay, - send_headers.clone(), - state.pbs_config().timeout_get_payload_ms, - api_version, - fork_name, - ))); + let mut handles = Vec::with_capacity(state.all_relays().len()); + for relay in state.all_relays().iter().cloned() { + handles.push( + tokio::spawn(submit_block_with_timeout( + signed_blinded_block.clone(), + relay, + send_headers.clone(), + state.pbs_config().timeout_get_payload_ms, + api_version, + fork_name, + )) + .map(|join_result| match join_result { + Ok(res) => res, + Err(err) => Err(PbsError::TokioJoinError(err)), + }), + ); } let results = select_ok(handles).await; @@ -81,14 +87,14 @@ pub async fn submit_block( /// Submit blinded block to relay, retry connection errors until the /// given timeout has passed async fn submit_block_with_timeout( - signed_blinded_block: &SignedBlindedBeaconBlock, - relay: &RelayClient, + signed_blinded_block: Arc, + relay: RelayClient, headers: HeaderMap, timeout_ms: u64, - api_version: &BuilderApiVersion, + api_version: BuilderApiVersion, fork_name: ForkName, ) -> Result, PbsError> { - let mut url = relay.submit_block_url(*api_version)?; + let mut url = relay.submit_block_url(api_version)?; let mut remaining_timeout_ms = timeout_ms; let mut retry = 0; let mut backoff = Duration::from_millis(250); @@ -97,12 +103,12 @@ async fn submit_block_with_timeout( let start_request = Instant::now(); match send_submit_block( url.clone(), - 
signed_blinded_block, - relay, + &signed_blinded_block, + &relay, headers.clone(), remaining_timeout_ms, retry, - api_version, + &api_version, fork_name, ) .await diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index 4784e6b1..004b601e 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use axum::{Json, extract::State, http::HeaderMap, response::IntoResponse}; use cb_common::{ pbs::{BuilderApiVersion, GetPayloadInfo, SignedBlindedBeaconBlock}, @@ -17,7 +19,7 @@ use crate::{ pub async fn handle_submit_block_v1>( state: State>, req_headers: HeaderMap, - signed_blinded_block: Json, + Json(signed_blinded_block): Json>, ) -> Result { handle_submit_block_impl::( state, @@ -31,7 +33,7 @@ pub async fn handle_submit_block_v1>( pub async fn handle_submit_block_v2>( state: State>, req_headers: HeaderMap, - signed_blinded_block: Json, + Json(signed_blinded_block): Json>, ) -> Result { handle_submit_block_impl::( state, @@ -45,7 +47,7 @@ pub async fn handle_submit_block_v2>( async fn handle_submit_block_impl>( State(state): State>, req_headers: HeaderMap, - Json(signed_blinded_block): Json, + signed_blinded_block: Arc, api_version: BuilderApiVersion, ) -> Result { tracing::Span::current().record("slot", signed_blinded_block.slot().as_u64() as i64); @@ -65,7 +67,7 @@ async fn handle_submit_block_impl>( info!(ua, ms_into_slot = now.saturating_sub(slot_start_ms), "new request"); - match A::submit_block(signed_blinded_block, req_headers, state, &api_version).await { + match A::submit_block(signed_blinded_block, req_headers, state, api_version).await { Ok(res) => match res { Some(block_response) => { trace!(?block_response);