Merge main to sigp-audit-fixes #361

Open · wants to merge 2 commits into sigp-audit-fixes

607 changes: 541 additions & 66 deletions Cargo.lock

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions Cargo.toml

@@ -24,7 +24,6 @@ axum-extra = { version = "0.10.0", features = ["typed-header"] }
 base64 = "0.22.1"
 bimap = { version = "0.6.3", features = ["serde"] }
 blsful = "2.5"
-blst = "0.3.11"
 bytes = "1.10.1"
 cb-cli = { path = "crates/cli" }
 cb-common = { path = "crates/common" }
@@ -39,7 +38,6 @@ ctr = "0.9.2"
 derive_more = { version = "2.0.1", features = ["deref", "display", "from", "into"] }
 docker-compose-types = "0.16.0"
 docker-image = "0.2.1"
-eth2_keystore = { git = "https://github.com/sigp/lighthouse", tag = "v7.0.1" }
 ethereum_serde_utils = "0.7.0"
 ethereum_ssz = "0.8"
 ethereum_ssz_derive = "0.8"
@@ -49,6 +47,8 @@ headers = "0.4.0"
 indexmap = "2.2.6"
 jsonwebtoken = { version = "9.3.1", default-features = false }
 lazy_static = "1.5.0"
+lh_eth2_keystore = { package = "eth2_keystore", git = "https://github.com/sigp/lighthouse", tag = "v7.1.0" }
+lh_types = { package = "types", git = "https://github.com/sigp/lighthouse", tag = "v7.1.0" }
 parking_lot = "0.12.3"
 pbkdf2 = "0.12.2"
 prometheus = "0.13.4"

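The dependency story of this merge in miniature: the direct blst dependency and the older eth2_keystore pin are dropped, replaced by two Lighthouse v7.1.0 crates imported under lh_-prefixed aliases. The `package` key only renames a dependency locally; the upstream crate names (eth2_keystore, types) are unchanged, which keeps a generic name like `types` from colliding with anything else in the tree. A minimal sketch of consuming the aliases (item paths are assumed from Lighthouse's usual layout, not taken from this diff):

    // Sketch, assumed API: load an EIP-2335 keystore and recover its public
    // key through the aliased Lighthouse crates.
    use lh_eth2_keystore::Keystore; // upstream package: eth2_keystore
    use lh_types::PublicKey;        // upstream package: types

    fn pubkey_from_keystore(json: &str, password: &[u8]) -> Option<PublicKey> {
        let keystore = Keystore::from_json_str(json).ok()?;
        let keypair = keystore.decrypt_keypair(password).ok()?;
        Some(keypair.pk)
    }
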
20 changes: 8 additions & 12 deletions benches/pbs/src/main.rs

@@ -1,12 +1,11 @@
 use std::time::{Duration, Instant};
 
-use alloy::{primitives::B256, rpc::types::beacon::BlsPublicKey};
+use alloy::primitives::B256;
 use cb_common::{
     config::RelayConfig,
     pbs::{GetHeaderResponse, RelayClient, RelayEntry},
-    signer::BlsSecretKey,
-    types::Chain,
-    utils::blst_pubkey_to_alloy,
+    types::{BlsPublicKey, BlsSecretKey, Chain},
+    utils::TestRandomSeed,
 };
 use cb_tests::mock_relay::{start_mock_relay_service, MockRelayState};
 use comfy_table::Table;
@@ -18,9 +17,6 @@ mod config;
 fn get_random_hash() -> B256 {
     B256::from(rand::random::<[u8; 32]>())
 }
-fn get_random_pubkey() -> BlsPublicKey {
-    BlsPublicKey::ZERO
-}
 
 #[tokio::main]
 async fn main() {
@@ -46,8 +42,8 @@ async fn main() {
     // bench
     for slot in 0..config.benchmark.n_slots {
         let parent_hash = get_random_hash();
-        let validator_pubkey = get_random_pubkey();
-        let url = mock_validator.get_header_url(slot, parent_hash, validator_pubkey).unwrap();
+        let validator_pubkey = BlsPublicKey::test_random();
+        let url = mock_validator.get_header_url(slot, &parent_hash, &validator_pubkey).unwrap();
 
         for _ in 0..config.benchmark.headers_per_slot {
             let url = url.clone();
@@ -138,8 +134,8 @@ const MOCK_RELAY_SECRET: [u8; 32] = [
     152, 98, 59, 240, 181, 131, 47, 1, 180, 255, 245,
 ];
 async fn start_mock_relay(chain: Chain, relay_config: RelayConfig) {
-    let signer = BlsSecretKey::key_gen(&MOCK_RELAY_SECRET, &[]).unwrap();
-    let pubkey: BlsPublicKey = blst_pubkey_to_alloy(&signer.sk_to_pk());
+    let signer = BlsSecretKey::deserialize(&MOCK_RELAY_SECRET).unwrap();
+    let pubkey: BlsPublicKey = signer.public_key();
 
     assert_eq!(relay_config.entry.pubkey, pubkey, "Expected relay pubkey to be 0xb060572f535ba5615b874ebfef757fbe6825352ad257e31d724e57fe25a067a13cfddd0f00cb17bf3a3d2e901a380c17");
 
@@ -152,7 +148,7 @@ async fn start_mock_relay(chain: Chain, relay_config: RelayConfig) {
 }
 
 fn get_mock_validator(bench: BenchConfig) -> RelayClient {
-    let entry = RelayEntry { id: bench.id, pubkey: BlsPublicKey::default(), url: bench.url };
+    let entry = RelayEntry { id: bench.id, pubkey: BlsPublicKey::test_random(), url: bench.url };
     let config = RelayConfig {
         entry,
         id: None,

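Two substitutions to note here. `BlsPublicKey::test_random()` (via the `TestRandomSeed` trait) replaces the hand-rolled zero-pubkey helper, and the relay signer is now built with `BlsSecretKey::deserialize`, which reads the 32 bytes directly as the secret scalar; blst's old `key_gen` ran an HKDF-style derivation over the same bytes, so the two constructions yield different keypairs from identical input. Condensed, the new flow is (a sketch using only the APIs shown in this diff):

    use cb_common::{
        types::{BlsPublicKey, BlsSecretKey},
        utils::TestRandomSeed,
    };

    fn mock_relay_keys(secret: &[u8; 32]) -> (BlsSecretKey, BlsPublicKey) {
        // deserialize() treats the bytes as the scalar itself and fails on an
        // out-of-range value; blst's key_gen instead derived a fresh scalar
        let signer = BlsSecretKey::deserialize(secret).expect("valid scalar bytes");
        let pubkey = signer.public_key();
        (signer, pubkey)
    }

    fn random_pubkey() -> BlsPublicKey {
        BlsPublicKey::test_random() // test-only randomness from TestRandomSeed
    }
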
4 changes: 2 additions & 2 deletions bin/src/lib.rs

@@ -13,8 +13,8 @@ pub mod prelude {
         signature::{
             verify_proposer_commitment_signature_bls, verify_proposer_commitment_signature_ecdsa,
         },
-        signer::{BlsPublicKey, BlsSignature, EcdsaSignature},
-        types::Chain,
+        signer::EcdsaSignature,
+        types::{BlsPublicKey, BlsSignature, Chain},
         utils::{initialize_tracing_log, utcnow_ms, utcnow_ns, utcnow_sec, utcnow_us},
     };
     pub use cb_metrics::provider::MetricsProvider;

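A mechanical re-export move: the BLS types leave the `signer` module for `types`, and the prelude is updated so glob importers see no difference. A hypothetical consumer (crate name assumed from the repo layout, not shown in this diff):

    // Hypothetical downstream module; nothing changes for glob imports.
    use commit_boost::prelude::*;

    fn log_delegation(pubkey: &BlsPublicKey, sig: &BlsSignature) {
        println!("{pubkey:?} signed at {} ms: {sig:?}", utcnow_ms());
    }
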
4 changes: 2 additions & 2 deletions crates/common/Cargo.toml

@@ -12,20 +12,20 @@ async-trait.workspace = true
 axum.workspace = true
 base64.workspace = true
 bimap.workspace = true
-blst.workspace = true
 bytes.workspace = true
 cipher.workspace = true
 const_format.workspace = true
 ctr.workspace = true
 derive_more.workspace = true
 docker-image.workspace = true
-eth2_keystore.workspace = true
 ethereum_serde_utils.workspace = true
 ethereum_ssz.workspace = true
 ethereum_ssz_derive.workspace = true
 eyre.workspace = true
 futures.workspace = true
 jsonwebtoken.workspace = true
+lh_eth2_keystore.workspace = true
+lh_types.workspace = true
 pbkdf2.workspace = true
 rand.workspace = true
 rayon.workspace = true

3 changes: 1 addition & 2 deletions crates/common/src/commit/client.rs

@@ -23,8 +23,7 @@ use crate::{
         response::{BlsSignResponse, EcdsaSignResponse},
     },
     constants::SIGNER_JWT_EXPIRATION,
-    signer::BlsPublicKey,
-    types::{Jwt, ModuleId},
+    types::{BlsPublicKey, Jwt, ModuleId},
     utils::create_jwt,
     DEFAULT_REQUEST_TIMEOUT,
 };

33 changes: 20 additions & 13 deletions crates/common/src/commit/request.rs

@@ -7,7 +7,6 @@ use std::{
 use alloy::{
     hex,
     primitives::{aliases::B32, Address, B256},
-    rpc::types::beacon::BlsSignature,
 };
 use serde::{Deserialize, Deserializer, Serialize};
 use tree_hash::TreeHash;
@@ -16,20 +15,28 @@ use tree_hash_derive::TreeHash;
 use crate::{
     config::decode_string_to_map,
     constants::COMMIT_BOOST_DOMAIN,
-    error::BlstErrorWrapper,
     signature::verify_signed_message,
-    signer::BlsPublicKey,
-    types::{Chain, ModuleId},
+    types::{BlsPublicKey, BlsSignature, Chain, ModuleId},
 };
 
-pub trait ProxyId: AsRef<[u8]> + Debug + Clone + Copy + TreeHash + Display {}
+pub trait ProxyId: Debug + Clone + TreeHash + Display {
+    fn to_bytes(&self) -> Vec<u8>;
+}
 
-impl ProxyId for Address {}
+impl ProxyId for Address {
+    fn to_bytes(&self) -> Vec<u8> {
+        self.0.as_slice().to_vec()
+    }
+}
 
-impl ProxyId for BlsPublicKey {}
+impl ProxyId for BlsPublicKey {
+    fn to_bytes(&self) -> Vec<u8> {
+        self.serialize().to_vec()
+    }
+}
 
 // GENERIC PROXY DELEGATION
-#[derive(Debug, Clone, Copy, Serialize, Deserialize, TreeHash)]
+#[derive(Debug, Clone, Serialize, Deserialize, TreeHash)]
 pub struct ProxyDelegation<T: ProxyId> {
     pub delegator: BlsPublicKey,
     pub proxy: T,
@@ -44,7 +51,7 @@ impl<T: ProxyId> fmt::Display for ProxyDelegation<T> {
     }
 }
 
-#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct SignedProxyDelegation<T: ProxyId> {
     pub message: ProxyDelegation<T>,
     /// Signature of message with the delegator keypair
@@ -55,7 +62,7 @@ pub type SignedProxyDelegationBls = SignedProxyDelegation<BlsPublicKey>;
 pub type SignedProxyDelegationEcdsa = SignedProxyDelegation<Address>;
 
 impl<T: ProxyId> SignedProxyDelegation<T> {
-    pub fn validate(&self, chain: Chain) -> Result<(), BlstErrorWrapper> {
+    pub fn validate(&self, chain: Chain) -> bool {
         verify_signed_message(
             chain,
             &self.message.delegator,
@@ -262,7 +269,7 @@ mod tests {
 
     #[test]
     fn test_decode_response_signature() {
-        let data = r#""0xa3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989a3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989""#;
+        let data = r#""0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000""#;
         let _: BlsSignature = serde_json::from_str(data).unwrap();
 
         let data = r#""0x985b495f49d1b96db3bba3f6c5dd1810950317c10d4c2042bd316f338cdbe74359072e209b85e56ac492092d7860063dd096ca31b4e164ef27e3f8d508e656801c""#;
@@ -295,7 +302,7 @@ mod tests {
                 "delegator": "0xa3366b54f28e4bf1461926a3c70cdb0ec432b5c92554ecaae3742d33fb33873990cbed1761c68020e6d3c14d30a22050",
                 "proxy": "0xa3366b54f28e4bf1461926a3c70cdb0ec432b5c92554ecaae3742d33fb33873990cbed1761c68020e6d3c14d30a22050"
             },
-            "signature": "0xa3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989a3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989"
+            "signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
         }"#;
 
         let _: SignedProxyDelegationBls = serde_json::from_str(data).unwrap();
@@ -305,7 +312,7 @@ mod tests {
                 "delegator": "0xa3366b54f28e4bf1461926a3c70cdb0ec432b5c92554ecaae3742d33fb33873990cbed1761c68020e6d3c14d30a22050",
                 "proxy": "0x4ca9939a8311a7cab3dde201b70157285fa81a9d"
             },
-            "signature": "0xa3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989a3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989"
+            "signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
         }"#;
 
         let _: SignedProxyDelegationEcdsa = serde_json::from_str(data).unwrap();

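Beyond the import shuffle, this file captures the API-visible consequences of the type swap. Lighthouse's BlsPublicKey is a validated curve point rather than a fixed byte array: it is not Copy and exposes no borrowed byte view, so the Copy derives are dropped and ProxyId's `AsRef<[u8]> + Copy` bounds give way to an owned `to_bytes()`. `validate` likewise now returns a plain bool, since the blst-specific BlstErrorWrapper is gone. The test vectors change for the same reason: the old signature literal (a 48-byte blob repeated twice) evidently does not decode under the stricter parser, while 0xc0 followed by 95 zero bytes is the canonical compressed encoding of the point at infinity and passes the format checks. A sketch of how a call site might adapt to the new `validate` signature (the `check` helper is illustrative, and the module path is assumed from the file layout):

    use cb_common::{
        commit::request::{ProxyId, SignedProxyDelegation},
        types::Chain,
    };

    // Illustrative only: adapting a caller to `validate` returning bool.
    fn check<T: ProxyId>(delegation: &SignedProxyDelegation<T>, chain: Chain) -> eyre::Result<()> {
        eyre::ensure!(delegation.validate(chain), "invalid proxy delegation signature");
        Ok(())
    }
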
10 changes: 5 additions & 5 deletions crates/common/src/commit/response.rs

@@ -1,10 +1,10 @@
-use alloy::{
-    primitives::{Address, B256, U256},
-    rpc::types::beacon::BlsSignature,
-};
+use alloy::primitives::{Address, B256, U256};
 use serde::{Deserialize, Serialize};
 
-use crate::signer::{BlsPublicKey, EcdsaSignature};
+use crate::{
+    signer::EcdsaSignature,
+    types::{BlsPublicKey, BlsSignature},
+};
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub struct BlsSignResponse {

73 changes: 46 additions & 27 deletions crates/common/src/config/mux.rs

@@ -8,21 +8,21 @@ use std::{
 use alloy::{
     primitives::{address, Address, U256},
     providers::ProviderBuilder,
-    rpc::{client::RpcClient, types::beacon::BlsPublicKey},
+    rpc::{client::RpcClient, types::beacon::constants::BLS_PUBLIC_KEY_BYTES_LEN},
     sol,
     transports::http::Http,
 };
 use eyre::{bail, ensure, Context};
 use reqwest::Client;
-use serde::{Deserialize, Serialize};
+use serde::{Deserialize, Deserializer, Serialize};
 use tracing::{debug, info, warn};
 use url::Url;
 
 use super::{load_optional_env_var, PbsConfig, RelayConfig, MUX_PATH_ENV};
 use crate::{
     config::{remove_duplicate_keys, safe_read_http_response},
     pbs::RelayClient,
-    types::Chain,
+    types::{BlsPublicKey, Chain},
 };
 
 #[derive(Debug, Deserialize, Serialize)]
@@ -103,8 +103,8 @@ impl PbsMuxes {
         let config = Arc::new(config);
 
         let runtime_config = RuntimeMuxConfig { id: mux.id, config, relays: relay_clients };
-        for pubkey in mux.validator_pubkeys.iter() {
-            configs.insert(*pubkey, runtime_config.clone());
+        for pubkey in mux.validator_pubkeys.into_iter() {
+            configs.insert(pubkey, runtime_config.clone());
         }
     }
 
@@ -296,7 +296,6 @@ async fn fetch_lido_registry_keys(
     debug!("fetching {total_keys} total keys");
 
     const CALL_BATCH_SIZE: u64 = 250u64;
-    const BLS_PK_LEN: usize = BlsPublicKey::len_bytes();
 
     let mut keys = vec![];
     let mut offset = 0;
@@ -311,13 +310,16 @@
             .pubkeys;
 
         ensure!(
-            pubkeys.len() % BLS_PK_LEN == 0,
-            "unexpected number of keys in batch, expected multiple of {BLS_PK_LEN}, got {}",
+            pubkeys.len() % BLS_PUBLIC_KEY_BYTES_LEN == 0,
+            "unexpected number of keys in batch, expected multiple of {BLS_PUBLIC_KEY_BYTES_LEN}, got {}",
            pubkeys.len()
         );
 
-        for chunk in pubkeys.chunks(BLS_PK_LEN) {
-            keys.push(BlsPublicKey::try_from(chunk)?);
+        for chunk in pubkeys.chunks(BLS_PUBLIC_KEY_BYTES_LEN) {
+            keys.push(
+                BlsPublicKey::deserialize(chunk)
+                    .map_err(|_| eyre::eyre!("invalid BLS public key"))?,
+            );
         }
 
         offset += limit;
@@ -356,10 +358,13 @@ async fn fetch_ssv_pubkeys(
         );
 
         let response = fetch_ssv_pubkeys_from_url(&url, http_timeout).await?;
-        pubkeys.extend(response.validators.iter().map(|v| v.pubkey).collect::<Vec<BlsPublicKey>>());
+        let fetched = response.validators.len();
+        pubkeys.extend(
+            response.validators.into_iter().map(|v| v.pubkey).collect::<Vec<BlsPublicKey>>(),
+        );
         page += 1;
 
-        if response.validators.len() < MAX_PER_PAGE {
+        if fetched < MAX_PER_PAGE {
             ensure!(
                 pubkeys.len() == response.pagination.total,
                 "expected {} keys, got {}",
@@ -397,12 +402,29 @@ struct SSVResponse {
     pagination: SSVPagination,
 }
 
-#[derive(Deserialize)]
 struct SSVValidator {
-    #[serde(rename = "public_key")]
     pubkey: BlsPublicKey,
 }
 
+impl<'de> Deserialize<'de> for SSVValidator {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        #[derive(Deserialize)]
+        struct SSVValidator {
+            public_key: String,
+        }
+
+        let s = SSVValidator::deserialize(deserializer)?;
+        let bytes = alloy::hex::decode(&s.public_key).map_err(serde::de::Error::custom)?;
+        let pubkey = BlsPublicKey::deserialize(&bytes)
+            .map_err(|e| serde::de::Error::custom(format!("invalid BLS public key: {e:?}")))?;
+
+        Ok(Self { pubkey })
+    }
+}
+
 #[derive(Deserialize)]
 struct SSVPagination {
     total: usize,
@@ -412,15 +434,15 @@ struct SSVPagination {
 mod tests {
     use std::net::SocketAddr;
 
-    use alloy::{hex::FromHex, primitives::U256, providers::ProviderBuilder};
+    use alloy::{primitives::U256, providers::ProviderBuilder};
     use axum::{response::Response, routing::get};
     use tokio::{net::TcpListener, task::JoinHandle};
    use url::Url;
 
     use super::*;
     use crate::{
         config::{HTTP_TIMEOUT_SECONDS_DEFAULT, MUXER_HTTP_MAX_LENGTH},
-        utils::{set_ignore_content_length, ResponseReadError},
+        utils::{bls_pubkey_from_hex_unchecked, set_ignore_content_length, ResponseReadError},
     };
 
     const TEST_HTTP_TIMEOUT: u64 = 2;
@@ -448,8 +470,11 @@ mod tests {
             .pubkeys;
 
         let mut vec = vec![];
-        for chunk in pubkeys.chunks(BlsPublicKey::len_bytes()) {
-            vec.push(BlsPublicKey::try_from(chunk)?);
+        for chunk in pubkeys.chunks(BLS_PUBLIC_KEY_BYTES_LEN) {
+            vec.push(
+                BlsPublicKey::deserialize(chunk)
+                    .map_err(|_| eyre::eyre!("invalid BLS public key"))?,
+            );
         }
 
         assert_eq!(vec.len(), LIMIT);
@@ -472,15 +497,9 @@ mod tests {
         // NOTE: requires that ssv_data.json dpesn't change
         assert_eq!(response.validators.len(), 3);
         let expected_pubkeys = [
-            BlsPublicKey::from_hex(
-                "0x967ba17a3e7f82a25aa5350ec34d6923e28ad8237b5a41efe2c5e325240d74d87a015bf04634f21900963539c8229b2a",
-            )?,
-            BlsPublicKey::from_hex(
-                "0xac769e8cec802e8ffee34de3253be8f438a0c17ee84bdff0b6730280d24b5ecb77ebc9c985281b41ee3bda8663b6658c",
-            )?,
-            BlsPublicKey::from_hex(
-                "0x8c866a5a05f3d45c49b457e29365259021a509c5daa82e124f9701a960ee87b8902e87175315ab638a3d8b1115b23639",
-            )?,
+            bls_pubkey_from_hex_unchecked("967ba17a3e7f82a25aa5350ec34d6923e28ad8237b5a41efe2c5e325240d74d87a015bf04634f21900963539c8229b2a"),
+            bls_pubkey_from_hex_unchecked("ac769e8cec802e8ffee34de3253be8f438a0c17ee84bdff0b6730280d24b5ecb77ebc9c985281b41ee3bda8663b6658c"),
+            bls_pubkey_from_hex_unchecked("8c866a5a05f3d45c49b457e29365259021a509c5daa82e124f9701a960ee87b8902e87175315ab638a3d8b1115b23639"),
         ];
         for (i, validator) in response.validators.iter().enumerate() {
            assert_eq!(validator.pubkey, expected_pubkeys[i]);

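The mux changes follow the same thread: raw byte lengths now come from alloy's BLS_PUBLIC_KEY_BYTES_LEN constant instead of a method on the key type, registry bytes are parsed with `BlsPublicKey::deserialize` (which validates the point, hence the explicit error mapping), and loops take pubkeys by value since the type is no longer Copy. The hand-rolled Deserialize for SSVValidator exists because the SSV API returns `public_key` as a hex string: it is hex-decoded first and then run through the same validating `deserialize`. A usage sketch (the pubkey is one of the test vectors above; the `serialize()` accessor and its 48-byte output follow the Lighthouse API):

    // Sketch: exercising the manual Deserialize impl. alloy::hex::decode
    // accepts hex with or without a 0x prefix, so both API spellings parse.
    #[test]
    fn ssv_validator_decodes_bare_hex() {
        let json = r#"{"public_key":"967ba17a3e7f82a25aa5350ec34d6923e28ad8237b5a41efe2c5e325240d74d87a015bf04634f21900963539c8229b2a"}"#;
        let validator: SSVValidator = serde_json::from_str(json).unwrap();
        assert_eq!(validator.pubkey.serialize().len(), 48); // BLS_PUBLIC_KEY_BYTES_LEN
    }
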