Add validator-manager (#3502)

## Issue Addressed

Addresses #2557

## Proposed Changes

Adds the `lighthouse validator-manager` command, which provides:

- `lighthouse validator-manager create`
    - Creates a `validators.json` file and a `deposits.json` (same format as https://github.com/ethereum/staking-deposit-cli)
- `lighthouse validator-manager import`
    - Imports validators from a `validators.json` file to the VC via the HTTP API.
- `lighthouse validator-manager move`
    - Moves validators from one VC to another, using only the VC API.

## Additional Info

In 98bcb947c I've reduced some VC `ERRO` and `CRIT` warnings to `WARN` or `DEBG` for the case where a pubkey is missing from the validator store. These were being triggered when we removed a validator but still had it in caches. It seems to me that `UnknownPubkey` will only happen in the case where we've removed a validator, so downgrading the logs is prudent. All the logs are `DEBG` apart from attestations and blocks which are `WARN`. I thought having *some* logging about this condition might help us down the track.

In 856cd7e37d I've made the VC delete the corresponding password file when it's deleting a keystore. This seemed like nice hygiene. Notably, it'll only delete that password file after it scans the validator definitions and finds that no other validator is also using that password file.
This commit is contained in:
Paul Hauner
2023-08-08 00:03:22 +00:00
parent 5ea75052a8
commit 1373dcf076
69 changed files with 6060 additions and 745 deletions

View File

@@ -1,15 +1,16 @@
use crate::ValidatorStore;
use account_utils::validator_definitions::ValidatorDefinition;
use account_utils::validator_definitions::{PasswordStorage, ValidatorDefinition};
use account_utils::{
eth2_keystore::Keystore,
eth2_wallet::{bip39::Mnemonic, WalletBuilder},
random_mnemonic, random_password, ZeroizeString,
};
use eth2::lighthouse_vc::types::{self as api_types};
use slot_clock::SlotClock;
use std::path::Path;
use std::path::{Path, PathBuf};
use types::ChainSpec;
use types::EthSpec;
use validator_dir::Builder as ValidatorDirBuilder;
use validator_dir::{keystore_password_path, Builder as ValidatorDirBuilder};
/// Create some validator EIP-2335 keystores and store them on disk. Then, enroll the validators in
/// this validator client.
@@ -27,6 +28,7 @@ pub async fn create_validators_mnemonic<P: AsRef<Path>, T: 'static + SlotClock,
key_derivation_path_offset: Option<u32>,
validator_requests: &[api_types::ValidatorRequest],
validator_dir: P,
secrets_dir: Option<PathBuf>,
validator_store: &ValidatorStore<T, E>,
spec: &ChainSpec,
) -> Result<(Vec<api_types::CreatedValidator>, Mnemonic), warp::Rejection> {
@@ -95,7 +97,11 @@ pub async fn create_validators_mnemonic<P: AsRef<Path>, T: 'static + SlotClock,
))
})?;
let voting_password_storage =
get_voting_password_storage(&secrets_dir, &keystores.voting, &voting_password_string)?;
let validator_dir = ValidatorDirBuilder::new(validator_dir.as_ref().into())
.password_dir_opt(secrets_dir.clone())
.voting_keystore(keystores.voting, voting_password.as_bytes())
.withdrawal_keystore(keystores.withdrawal, withdrawal_password.as_bytes())
.create_eth1_tx_data(request.deposit_gwei, spec)
@@ -136,7 +142,7 @@ pub async fn create_validators_mnemonic<P: AsRef<Path>, T: 'static + SlotClock,
validator_store
.add_validator_keystore(
voting_keystore_path,
voting_password_string,
voting_password_storage,
request.enable,
request.graffiti.clone(),
request.suggested_fee_recipient,
@@ -185,3 +191,26 @@ pub async fn create_validators_web3signer<T: 'static + SlotClock, E: EthSpec>(
Ok(())
}
/// Determine where the voting-keystore password should be stored.
///
/// With a `secrets_dir`, the password is destined for a file in that
/// directory (`PasswordStorage::File`); an error is returned when a password
/// file already exists at the computed path. Without a `secrets_dir`, the
/// password is kept inline in the validator definitions file
/// (`PasswordStorage::ValidatorDefinitions`).
pub fn get_voting_password_storage(
    secrets_dir: &Option<PathBuf>,
    voting_keystore: &Keystore,
    voting_password_string: &ZeroizeString,
) -> Result<PasswordStorage, warp::Rejection> {
    match secrets_dir {
        Some(dir) => {
            let password_path = keystore_password_path(dir, voting_keystore);
            // Refuse to clobber a pre-existing password file.
            if password_path.exists() {
                return Err(warp_utils::reject::custom_server_error(
                    "Duplicate keystore password path".to_string(),
                ));
            }
            Ok(PasswordStorage::File(password_path))
        }
        None => Ok(PasswordStorage::ValidatorDefinitions(
            voting_password_string.clone(),
        )),
    }
}

View File

@@ -3,11 +3,14 @@ use crate::{
initialized_validators::Error, signing_method::SigningMethod, InitializedValidators,
ValidatorStore,
};
use account_utils::ZeroizeString;
use eth2::lighthouse_vc::std_types::{
DeleteKeystoreStatus, DeleteKeystoresRequest, DeleteKeystoresResponse, ImportKeystoreStatus,
ImportKeystoresRequest, ImportKeystoresResponse, InterchangeJsonStr, KeystoreJsonStr,
ListKeystoresResponse, SingleKeystoreResponse, Status,
use account_utils::{validator_definitions::PasswordStorage, ZeroizeString};
use eth2::lighthouse_vc::{
std_types::{
DeleteKeystoreStatus, DeleteKeystoresRequest, DeleteKeystoresResponse,
ImportKeystoreStatus, ImportKeystoresRequest, ImportKeystoresResponse, InterchangeJsonStr,
KeystoreJsonStr, ListKeystoresResponse, SingleKeystoreResponse, Status,
},
types::{ExportKeystoresResponse, SingleExportKeystoresResponse},
};
use eth2_keystore::Keystore;
use slog::{info, warn, Logger};
@@ -17,7 +20,7 @@ use std::sync::Arc;
use task_executor::TaskExecutor;
use tokio::runtime::Handle;
use types::{EthSpec, PublicKeyBytes};
use validator_dir::Builder as ValidatorDirBuilder;
use validator_dir::{keystore_password_path, Builder as ValidatorDirBuilder};
use warp::Rejection;
use warp_utils::reject::{custom_bad_request, custom_server_error};
@@ -58,6 +61,7 @@ pub fn list<T: SlotClock + 'static, E: EthSpec>(
pub fn import<T: SlotClock + 'static, E: EthSpec>(
request: ImportKeystoresRequest,
validator_dir: PathBuf,
secrets_dir: Option<PathBuf>,
validator_store: Arc<ValidatorStore<T, E>>,
task_executor: TaskExecutor,
log: Logger,
@@ -128,6 +132,7 @@ pub fn import<T: SlotClock + 'static, E: EthSpec>(
keystore,
password,
validator_dir.clone(),
secrets_dir.clone(),
&validator_store,
handle,
) {
@@ -158,6 +163,7 @@ fn import_single_keystore<T: SlotClock + 'static, E: EthSpec>(
keystore: Keystore,
password: ZeroizeString,
validator_dir_path: PathBuf,
secrets_dir: Option<PathBuf>,
validator_store: &ValidatorStore<T, E>,
handle: Handle,
) -> Result<ImportKeystoreStatus, String> {
@@ -179,6 +185,16 @@ fn import_single_keystore<T: SlotClock + 'static, E: EthSpec>(
}
}
let password_storage = if let Some(secrets_dir) = &secrets_dir {
let password_path = keystore_password_path(secrets_dir, &keystore);
if password_path.exists() {
return Ok(ImportKeystoreStatus::Duplicate);
}
PasswordStorage::File(password_path)
} else {
PasswordStorage::ValidatorDefinitions(password.clone())
};
// Check that the password is correct.
// In future we should re-structure to avoid the double decryption here. It's not as simple
// as removing this check because `add_validator_keystore` will break if provided with an
@@ -189,6 +205,7 @@ fn import_single_keystore<T: SlotClock + 'static, E: EthSpec>(
.map_err(|e| format!("incorrect password: {:?}", e))?;
let validator_dir = ValidatorDirBuilder::new(validator_dir_path)
.password_dir_opt(secrets_dir)
.voting_keystore(keystore, password.as_ref())
.store_withdrawal_keystore(false)
.build()
@@ -201,7 +218,7 @@ fn import_single_keystore<T: SlotClock + 'static, E: EthSpec>(
handle
.block_on(validator_store.add_validator_keystore(
voting_keystore_path,
password,
password_storage,
true,
None,
None,
@@ -219,11 +236,28 @@ pub fn delete<T: SlotClock + 'static, E: EthSpec>(
task_executor: TaskExecutor,
log: Logger,
) -> Result<DeleteKeystoresResponse, Rejection> {
let export_response = export(request, validator_store, task_executor, log)?;
Ok(DeleteKeystoresResponse {
data: export_response
.data
.into_iter()
.map(|response| response.status)
.collect(),
slashing_protection: export_response.slashing_protection,
})
}
pub fn export<T: SlotClock + 'static, E: EthSpec>(
request: DeleteKeystoresRequest,
validator_store: Arc<ValidatorStore<T, E>>,
task_executor: TaskExecutor,
log: Logger,
) -> Result<ExportKeystoresResponse, Rejection> {
// Remove from initialized validators.
let initialized_validators_rwlock = validator_store.initialized_validators();
let mut initialized_validators = initialized_validators_rwlock.write();
let mut statuses = request
let mut responses = request
.pubkeys
.iter()
.map(|pubkey_bytes| {
@@ -232,7 +266,7 @@ pub fn delete<T: SlotClock + 'static, E: EthSpec>(
&mut initialized_validators,
task_executor.clone(),
) {
Ok(status) => Status::ok(status),
Ok(status) => status,
Err(error) => {
warn!(
log,
@@ -240,7 +274,11 @@ pub fn delete<T: SlotClock + 'static, E: EthSpec>(
"pubkey" => ?pubkey_bytes,
"error" => ?error,
);
Status::error(DeleteKeystoreStatus::Error, error)
SingleExportKeystoresResponse {
status: Status::error(DeleteKeystoreStatus::Error, error),
validating_keystore: None,
validating_keystore_password: None,
}
}
}
})
@@ -263,19 +301,19 @@ pub fn delete<T: SlotClock + 'static, E: EthSpec>(
})?;
// Update statuses based on availability of slashing protection data.
for (pubkey, status) in request.pubkeys.iter().zip(statuses.iter_mut()) {
if status.status == DeleteKeystoreStatus::NotFound
for (pubkey, response) in request.pubkeys.iter().zip(responses.iter_mut()) {
if response.status.status == DeleteKeystoreStatus::NotFound
&& slashing_protection
.data
.iter()
.any(|interchange_data| interchange_data.pubkey == *pubkey)
{
status.status = DeleteKeystoreStatus::NotActive;
response.status.status = DeleteKeystoreStatus::NotActive;
}
}
Ok(DeleteKeystoresResponse {
data: statuses,
Ok(ExportKeystoresResponse {
data: responses,
slashing_protection,
})
}
@@ -284,7 +322,7 @@ fn delete_single_keystore(
pubkey_bytes: &PublicKeyBytes,
initialized_validators: &mut InitializedValidators,
task_executor: TaskExecutor,
) -> Result<DeleteKeystoreStatus, String> {
) -> Result<SingleExportKeystoresResponse, String> {
if let Some(handle) = task_executor.handle() {
let pubkey = pubkey_bytes
.decompress()
@@ -292,9 +330,22 @@ fn delete_single_keystore(
match handle.block_on(initialized_validators.delete_definition_and_keystore(&pubkey, true))
{
Ok(_) => Ok(DeleteKeystoreStatus::Deleted),
Ok(Some(keystore_and_password)) => Ok(SingleExportKeystoresResponse {
status: Status::ok(DeleteKeystoreStatus::Deleted),
validating_keystore: Some(KeystoreJsonStr(keystore_and_password.keystore)),
validating_keystore_password: keystore_and_password.password,
}),
Ok(None) => Ok(SingleExportKeystoresResponse {
status: Status::ok(DeleteKeystoreStatus::Deleted),
validating_keystore: None,
validating_keystore_password: None,
}),
Err(e) => match e {
Error::ValidatorNotInitialized(_) => Ok(DeleteKeystoreStatus::NotFound),
Error::ValidatorNotInitialized(_) => Ok(SingleExportKeystoresResponse {
status: Status::ok(DeleteKeystoreStatus::NotFound),
validating_keystore: None,
validating_keystore_password: None,
}),
_ => Err(format!("unable to disable and delete: {:?}", e)),
},
}

View File

@@ -5,6 +5,8 @@ mod keystores;
mod remotekeys;
mod tests;
pub mod test_utils;
use crate::http_api::create_signed_voluntary_exit::create_signed_voluntary_exit;
use crate::{determine_graffiti, GraffitiFile, ValidatorStore};
use account_utils::{
@@ -12,7 +14,9 @@ use account_utils::{
validator_definitions::{SigningDefinition, ValidatorDefinition, Web3SignerDefinition},
};
pub use api_secret::ApiSecret;
use create_validator::{create_validators_mnemonic, create_validators_web3signer};
use create_validator::{
create_validators_mnemonic, create_validators_web3signer, get_voting_password_storage,
};
use eth2::lighthouse_vc::{
std_types::{AuthResponse, GetFeeRecipientResponse, GetGasLimitResponse},
types::{self as api_types, GenericResponse, Graffiti, PublicKey, PublicKeyBytes},
@@ -71,6 +75,7 @@ pub struct Context<T: SlotClock, E: EthSpec> {
pub api_secret: ApiSecret,
pub validator_store: Option<Arc<ValidatorStore<T, E>>>,
pub validator_dir: Option<PathBuf>,
pub secrets_dir: Option<PathBuf>,
pub graffiti_file: Option<GraffitiFile>,
pub graffiti_flag: Option<Graffiti>,
pub spec: ChainSpec,
@@ -88,6 +93,8 @@ pub struct Config {
pub listen_addr: IpAddr,
pub listen_port: u16,
pub allow_origin: Option<String>,
pub allow_keystore_export: bool,
pub store_passwords_in_secrets_dir: bool,
}
impl Default for Config {
@@ -97,6 +104,8 @@ impl Default for Config {
listen_addr: IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
listen_port: 5062,
allow_origin: None,
allow_keystore_export: false,
store_passwords_in_secrets_dir: false,
}
}
}
@@ -121,6 +130,8 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
shutdown: impl Future<Output = ()> + Send + Sync + 'static,
) -> Result<(SocketAddr, impl Future<Output = ()>), Error> {
let config = &ctx.config;
let allow_keystore_export = config.allow_keystore_export;
let store_passwords_in_secrets_dir = config.store_passwords_in_secrets_dir;
let log = ctx.log.clone();
// Configure CORS.
@@ -187,6 +198,17 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
})
});
let inner_secrets_dir = ctx.secrets_dir.clone();
let secrets_dir_filter = warp::any().map(move || inner_secrets_dir.clone()).and_then(
|secrets_dir: Option<_>| async move {
secrets_dir.ok_or_else(|| {
warp_utils::reject::custom_not_found(
"secrets_dir directory is not initialized.".to_string(),
)
})
},
);
let inner_graffiti_file = ctx.graffiti_file.clone();
let graffiti_file_filter = warp::any().map(move || inner_graffiti_file.clone());
@@ -394,18 +416,21 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
.and(warp::path::end())
.and(warp::body::json())
.and(validator_dir_filter.clone())
.and(secrets_dir_filter.clone())
.and(validator_store_filter.clone())
.and(spec_filter.clone())
.and(signer.clone())
.and(task_executor_filter.clone())
.and_then(
|body: Vec<api_types::ValidatorRequest>,
validator_dir: PathBuf,
validator_store: Arc<ValidatorStore<T, E>>,
spec: Arc<ChainSpec>,
signer,
task_executor: TaskExecutor| {
move |body: Vec<api_types::ValidatorRequest>,
validator_dir: PathBuf,
secrets_dir: PathBuf,
validator_store: Arc<ValidatorStore<T, E>>,
spec: Arc<ChainSpec>,
signer,
task_executor: TaskExecutor| {
blocking_signed_json_task(signer, move || {
let secrets_dir = store_passwords_in_secrets_dir.then_some(secrets_dir);
if let Some(handle) = task_executor.handle() {
let (validators, mnemonic) =
handle.block_on(create_validators_mnemonic(
@@ -413,6 +438,7 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
None,
&body,
&validator_dir,
secrets_dir,
&validator_store,
&spec,
))?;
@@ -437,18 +463,21 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
.and(warp::path::end())
.and(warp::body::json())
.and(validator_dir_filter.clone())
.and(secrets_dir_filter.clone())
.and(validator_store_filter.clone())
.and(spec_filter)
.and(signer.clone())
.and(task_executor_filter.clone())
.and_then(
|body: api_types::CreateValidatorsMnemonicRequest,
validator_dir: PathBuf,
validator_store: Arc<ValidatorStore<T, E>>,
spec: Arc<ChainSpec>,
signer,
task_executor: TaskExecutor| {
move |body: api_types::CreateValidatorsMnemonicRequest,
validator_dir: PathBuf,
secrets_dir: PathBuf,
validator_store: Arc<ValidatorStore<T, E>>,
spec: Arc<ChainSpec>,
signer,
task_executor: TaskExecutor| {
blocking_signed_json_task(signer, move || {
let secrets_dir = store_passwords_in_secrets_dir.then_some(secrets_dir);
if let Some(handle) = task_executor.handle() {
let mnemonic =
mnemonic_from_phrase(body.mnemonic.as_str()).map_err(|e| {
@@ -463,6 +492,7 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
Some(body.key_derivation_path_offset),
&body.validators,
&validator_dir,
secrets_dir,
&validator_store,
&spec,
))?;
@@ -483,15 +513,17 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
.and(warp::path::end())
.and(warp::body::json())
.and(validator_dir_filter.clone())
.and(secrets_dir_filter.clone())
.and(validator_store_filter.clone())
.and(signer.clone())
.and(task_executor_filter.clone())
.and_then(
|body: api_types::KeystoreValidatorsPostRequest,
validator_dir: PathBuf,
validator_store: Arc<ValidatorStore<T, E>>,
signer,
task_executor: TaskExecutor| {
move |body: api_types::KeystoreValidatorsPostRequest,
validator_dir: PathBuf,
secrets_dir: PathBuf,
validator_store: Arc<ValidatorStore<T, E>>,
signer,
task_executor: TaskExecutor| {
blocking_signed_json_task(signer, move || {
// Check to ensure the password is correct.
let keypair = body
@@ -504,7 +536,12 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
))
})?;
let secrets_dir = store_passwords_in_secrets_dir.then_some(secrets_dir);
let password_storage =
get_voting_password_storage(&secrets_dir, &body.keystore, &body.password)?;
let validator_dir = ValidatorDirBuilder::new(validator_dir.clone())
.password_dir_opt(secrets_dir)
.voting_keystore(body.keystore.clone(), body.password.as_ref())
.store_withdrawal_keystore(false)
.build()
@@ -518,7 +555,6 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
// Drop validator dir so that `add_validator_keystore` can re-lock the keystore.
let voting_keystore_path = validator_dir.voting_keystore_path();
drop(validator_dir);
let voting_password = body.password.clone();
let graffiti = body.graffiti.clone();
let suggested_fee_recipient = body.suggested_fee_recipient;
let gas_limit = body.gas_limit;
@@ -529,7 +565,7 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
handle
.block_on(validator_store.add_validator_keystore(
voting_keystore_path,
voting_password,
password_storage,
body.enable,
graffiti,
suggested_fee_recipient,
@@ -698,6 +734,29 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
})
});
// DELETE /lighthouse/keystores
let delete_lighthouse_keystores = warp::path("lighthouse")
.and(warp::path("keystores"))
.and(warp::path::end())
.and(warp::body::json())
.and(signer.clone())
.and(validator_store_filter.clone())
.and(task_executor_filter.clone())
.and(log_filter.clone())
.and_then(
move |request, signer, validator_store, task_executor, log| {
blocking_signed_json_task(signer, move || {
if allow_keystore_export {
keystores::export(request, validator_store, task_executor, log)
} else {
Err(warp_utils::reject::custom_bad_request(
"keystore export is disabled".to_string(),
))
}
})
},
);
// Standard key-manager endpoints.
let eth_v1 = warp::path("eth").and(warp::path("v1"));
let std_keystores = eth_v1.and(warp::path("keystores")).and(warp::path::end());
@@ -982,13 +1041,28 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
.and(warp::body::json())
.and(signer.clone())
.and(validator_dir_filter)
.and(secrets_dir_filter)
.and(validator_store_filter.clone())
.and(task_executor_filter.clone())
.and(log_filter.clone())
.and_then(
|request, signer, validator_dir, validator_store, task_executor, log| {
move |request,
signer,
validator_dir,
secrets_dir,
validator_store,
task_executor,
log| {
let secrets_dir = store_passwords_in_secrets_dir.then_some(secrets_dir);
blocking_signed_json_task(signer, move || {
keystores::import(request, validator_dir, validator_store, task_executor, log)
keystores::import(
request,
validator_dir,
secrets_dir,
validator_store,
task_executor,
log,
)
})
},
);
@@ -1117,7 +1191,8 @@ pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
))
.or(warp::patch().and(patch_validators))
.or(warp::delete().and(
delete_fee_recipient
delete_lighthouse_keystores
.or(delete_fee_recipient)
.or(delete_gas_limit)
.or(delete_std_keystores)
.or(delete_std_remotekeys),

View File

@@ -0,0 +1,631 @@
use crate::doppelganger_service::DoppelgangerService;
use crate::key_cache::{KeyCache, CACHE_FILENAME};
use crate::{
http_api::{ApiSecret, Config as HttpConfig, Context},
initialized_validators::{InitializedValidators, OnDecryptFailure},
Config, ValidatorDefinitions, ValidatorStore,
};
use account_utils::{
eth2_wallet::WalletBuilder, mnemonic_from_phrase, random_mnemonic, random_password,
ZeroizeString,
};
use deposit_contract::decode_eth1_tx_data;
use eth2::{
lighthouse_vc::{http_client::ValidatorClientHttpClient, types::*},
types::ErrorMessage as ApiErrorMessage,
Error as ApiError,
};
use eth2_keystore::KeystoreBuilder;
use logging::test_logger;
use parking_lot::RwLock;
use sensitive_url::SensitiveUrl;
use slashing_protection::{SlashingDatabase, SLASHING_PROTECTION_FILENAME};
use slot_clock::{SlotClock, TestingSlotClock};
use std::future::Future;
use std::marker::PhantomData;
use std::net::{IpAddr, Ipv4Addr};
use std::sync::Arc;
use std::time::Duration;
use task_executor::test_utils::TestRuntime;
use tempfile::{tempdir, TempDir};
use tokio::sync::oneshot;
/// Password used for every test wallet/keystore generated by these helpers.
pub const PASSWORD_BYTES: &[u8] = &[42, 50, 37];
/// Fee recipient written into the test validator client's config.
pub const TEST_DEFAULT_FEE_RECIPIENT: Address = Address::repeat_byte(42);
/// All tests in this module run against the mainnet spec.
type E = MainnetEthSpec;
/// Describes a batch of HD (mnemonic-derived) validators to create via the API.
pub struct HdValidatorScenario {
    /// Number of validators to create.
    pub count: usize,
    /// If `true`, the test supplies its own mnemonic; otherwise the server
    /// generates one.
    pub specify_mnemonic: bool,
    /// Key-derivation index to start from; must be 0 unless `specify_mnemonic`
    /// is set (asserted in `create_hd_validators`).
    pub key_derivation_path_offset: u32,
    /// Indices (into the batch) of validators to create in a disabled state.
    pub disabled: Vec<usize>,
}
/// Describes a single keystore-validator import via the API.
pub struct KeystoreValidatorScenario {
    /// Whether the validator should be enabled after import.
    pub enabled: bool,
    /// If `false`, a random (wrong) password is submitted and the import is
    /// expected to fail.
    pub correct_password: bool,
}
/// Describes a batch of remote-signer (web3signer) validators to enroll via
/// the API.
pub struct Web3SignerValidatorScenario {
    /// Number of validators to enroll.
    pub count: usize,
    /// Whether the validators should be enabled after enrollment.
    pub enabled: bool,
}
/// Test harness: an in-process validator client HTTP API server plus an
/// authenticated client for exercising it.
pub struct ApiTester {
    /// HTTP client pre-configured with the server's URL and API token.
    pub client: ValidatorClientHttpClient,
    /// Shared handle to the VC's initialized-validator set, used by the
    /// count/enabled assertions.
    pub initialized_validators: Arc<RwLock<InitializedValidators>>,
    /// The validator store backing the server.
    pub validator_store: Arc<ValidatorStore<TestingSlotClock, E>>,
    /// Base URL of the running server.
    pub url: SensitiveUrl,
    /// The API token (also held by `client`), kept for building extra clients.
    pub api_token: String,
    /// Runtime on which the server's tasks execute.
    pub test_runtime: TestRuntime,
    /// Keeps the server alive; dropping (or using) this sender triggers the
    /// server's shutdown future.
    pub _server_shutdown: oneshot::Sender<()>,
    /// Temporary validator directory, held so it outlives the test.
    pub validator_dir: TempDir,
    /// Temporary secrets directory, held so it outlives the test.
    pub secrets_dir: TempDir,
}
impl ApiTester {
/// Build a tester using the default test HTTP configuration.
pub async fn new() -> Self {
    let http_config = Self::default_http_config();
    Self::new_with_http_config(http_config).await
}
/// Boot a validator client HTTP API server with the given `http_config`,
/// backed by fresh temporary directories, and return a tester holding an
/// authenticated client for it.
pub async fn new_with_http_config(http_config: HttpConfig) -> Self {
    let log = test_logger();
    // Fresh, disposable directories for this test instance.
    let validator_dir = tempdir().unwrap();
    let secrets_dir = tempdir().unwrap();
    let validator_defs = ValidatorDefinitions::open_or_create(validator_dir.path()).unwrap();
    let initialized_validators = InitializedValidators::from_definitions(
        validator_defs,
        validator_dir.path().into(),
        log.clone(),
    )
    .await
    .unwrap();
    // The API auth secret lives alongside the validator definitions.
    let api_secret = ApiSecret::create_or_open(validator_dir.path()).unwrap();
    let api_pubkey = api_secret.api_token();
    let config = Config {
        validator_dir: validator_dir.path().into(),
        secrets_dir: secrets_dir.path().into(),
        fee_recipient: Some(TEST_DEFAULT_FEE_RECIPIENT),
        ..Default::default()
    };
    let spec = E::default_spec();
    let slashing_db_path = config.validator_dir.join(SLASHING_PROTECTION_FILENAME);
    let slashing_protection = SlashingDatabase::open_or_create(&slashing_db_path).unwrap();
    // Deterministic clock starting at slot 0 with one-second slots.
    let slot_clock =
        TestingSlotClock::new(Slot::new(0), Duration::from_secs(0), Duration::from_secs(1));
    let test_runtime = TestRuntime::default();
    let validator_store = Arc::new(ValidatorStore::<_, E>::new(
        initialized_validators,
        slashing_protection,
        Hash256::repeat_byte(42),
        spec,
        Some(Arc::new(DoppelgangerService::new(log.clone()))),
        slot_clock.clone(),
        &config,
        test_runtime.task_executor.clone(),
        log.clone(),
    ));
    validator_store
        .register_all_in_doppelganger_protection_if_enabled()
        .expect("Should attach doppelganger service");
    let initialized_validators = validator_store.initialized_validators();
    let context = Arc::new(Context {
        task_executor: test_runtime.task_executor.clone(),
        api_secret,
        validator_dir: Some(validator_dir.path().into()),
        secrets_dir: Some(secrets_dir.path().into()),
        validator_store: Some(validator_store.clone()),
        graffiti_file: None,
        graffiti_flag: Some(Graffiti::default()),
        spec: E::default_spec(),
        config: http_config,
        log,
        sse_logging_components: None,
        slot_clock,
        _phantom: PhantomData,
    });
    let ctx = context;
    let (shutdown_tx, shutdown_rx) = oneshot::channel();
    let server_shutdown = async {
        // It's not really interesting why this triggered, just that it happened.
        let _ = shutdown_rx.await;
    };
    // With the default test config, `listen_port` is 0, so the OS assigns a
    // free port; recover the actual address from the bound socket.
    let (listening_socket, server) = super::serve(ctx, server_shutdown).unwrap();
    tokio::spawn(server);
    let url = SensitiveUrl::parse(&format!(
        "http://{}:{}",
        listening_socket.ip(),
        listening_socket.port()
    ))
    .unwrap();
    let client = ValidatorClientHttpClient::new(url.clone(), api_pubkey.clone()).unwrap();
    Self {
        client,
        initialized_validators,
        validator_store,
        url,
        api_token: api_pubkey,
        test_runtime,
        _server_shutdown: shutdown_tx,
        validator_dir,
        secrets_dir,
    }
}
/// An `HttpConfig` suitable for tests: bind to an OS-assigned port on
/// localhost, permit keystore export, and keep passwords inline (not in the
/// secrets dir).
pub fn default_http_config() -> HttpConfig {
    HttpConfig {
        enabled: true,
        // `LOCALHOST` is 127.0.0.1; port 0 lets the OS pick a free port.
        listen_addr: IpAddr::V4(Ipv4Addr::LOCALHOST),
        listen_port: 0,
        allow_origin: None,
        allow_keystore_export: true,
        store_passwords_in_secrets_dir: false,
    }
}
/// Checks that the key cache exists and can be decrypted with the current
/// set of known validators.
///
/// # Panics
///
/// Panics if the cache file is missing, cannot be opened, or fails to
/// decrypt against the current validator set.
#[allow(clippy::await_holding_lock)] // This is a test, so it should be fine.
pub async fn ensure_key_cache_consistency(&self) {
    assert!(
        self.validator_dir.as_ref().join(CACHE_FILENAME).exists(),
        "the key cache should exist"
    );
    let key_cache =
        KeyCache::open_or_create(self.validator_dir.as_ref()).expect("should open a key cache");
    self.initialized_validators
        .read()
        .decrypt_key_cache(key_cache, &mut <_>::default(), OnDecryptFailure::Error)
        .await
        // Fixed typo in the expect message: "decypt" -> "decrypt".
        .expect("key cache should decrypt");
}
/// Build a client pointed at this server whose API token comes from a freshly
/// generated secret, so every request it makes will fail authorization.
pub fn invalid_token_client(&self) -> ValidatorClientHttpClient {
    let scratch_dir = tempdir().unwrap();
    let bogus_secret = ApiSecret::create_or_open(scratch_dir.path()).unwrap();
    let bogus_token = bogus_secret.api_token();
    ValidatorClientHttpClient::new(self.url.clone(), bogus_token).unwrap()
}
/// Run `func` against the server twice — once with a wrong API token and once
/// with no Authorization header at all — asserting the expected 403 and 401
/// rejections.
pub async fn test_with_invalid_auth<F, A, T>(self, func: F) -> Self
where
    F: Fn(ValidatorClientHttpClient) -> A,
    A: Future<Output = Result<T, ApiError>>,
{
    // A wrong token must be rejected with HTTP 403.
    match func(self.invalid_token_client()).await {
        Err(ApiError::ServerMessage(ApiErrorMessage { code: 403, .. })) => (),
        Err(other) => panic!("expected authorized error, got {:?}", other),
        Ok(_) => panic!("expected authorized error, got Ok"),
    }

    // A missing Authorization header must be rejected with HTTP 401 and a
    // descriptive message.
    let mut headerless_client = self.client.clone();
    headerless_client.send_authorization_header(false);
    match func(headerless_client).await {
        Err(ApiError::ServerMessage(ApiErrorMessage {
            code: 401, message, ..
        })) if message.contains("missing Authorization header") => (),
        Err(other) => panic!("expected missing header error, got {:?}", other),
        Ok(_) => panic!("expected missing header error, got Ok"),
    }

    self
}
/// Replace this tester's client with one holding an invalid API token.
pub fn invalidate_api_token(mut self) -> Self {
    let bad_client = self.invalid_token_client();
    self.client = bad_client;
    self
}
/// The version endpoint must reject this tester's (invalidated) credentials.
pub async fn test_get_lighthouse_version_invalid(self) -> Self {
    let response = self.client.get_lighthouse_version().await;
    response.unwrap_err();
    self
}
/// Fetch the chain spec from the server and check it matches the
/// locally-known default spec.
pub async fn test_get_lighthouse_spec(self) -> Self {
    let response = self
        .client
        .get_lighthouse_spec::<ConfigAndPresetBellatrix>()
        .await
        .unwrap();
    let result = ConfigAndPreset::Bellatrix(response.data);
    let expected = ConfigAndPreset::from_chain_spec::<E>(&E::default_spec(), None);
    assert_eq!(result, expected);
    self
}
/// The reported version must match this build's version string.
pub async fn test_get_lighthouse_version(self) -> Self {
    let expected = VersionData {
        version: lighthouse_version::version_with_platform(),
    };
    let actual = self.client.get_lighthouse_version().await.unwrap().data;
    assert_eq!(actual, expected);
    self
}
/// Health metrics are only implemented on Linux; expect success there.
#[cfg(target_os = "linux")]
pub async fn test_get_lighthouse_health(self) -> Self {
    let health = self.client.get_lighthouse_health().await;
    health.unwrap();
    self
}
/// Health metrics are only implemented on Linux; expect an error elsewhere.
#[cfg(not(target_os = "linux"))]
pub async fn test_get_lighthouse_health(self) -> Self {
    let health = self.client.get_lighthouse_health().await;
    health.unwrap_err();
    self
}
/// Total number of validators known to the VC, enabled or not.
pub fn vals_total(&self) -> usize {
    let validators = self.initialized_validators.read();
    validators.num_total()
}
/// Number of validators currently enabled on the VC.
pub fn vals_enabled(&self) -> usize {
    let validators = self.initialized_validators.read();
    validators.num_enabled()
}
/// Assert that exactly `count` validators are currently enabled.
pub fn assert_enabled_validators_count(self, count: usize) -> Self {
    let enabled = self.vals_enabled();
    assert_eq!(enabled, count);
    self
}
/// Assert that exactly `count` validators exist in total.
pub fn assert_validators_count(self, count: usize) -> Self {
    let total = self.vals_total();
    assert_eq!(total, count);
    self
}
/// Create `s.count` HD validators via the API, then verify the server's
/// response, the validator counts, and that the returned mnemonic can locally
/// regenerate every key and its deposit data.
pub async fn create_hd_validators(self, s: HdValidatorScenario) -> Self {
    let initial_vals = self.vals_total();
    let initial_enabled_vals = self.vals_enabled();
    // One request entry per validator; entries whose index appears in
    // `s.disabled` are created in a disabled state.
    let validators = (0..s.count)
        .map(|i| ValidatorRequest {
            enable: !s.disabled.contains(&i),
            description: format!("boi #{}", i),
            graffiti: None,
            suggested_fee_recipient: None,
            gas_limit: None,
            builder_proposals: None,
            deposit_gwei: E::default_spec().max_effective_balance,
        })
        .collect::<Vec<_>>();
    let (response, mnemonic) = if s.specify_mnemonic {
        // Client-supplied mnemonic: use the mnemonic-specific endpoint and
        // keep our own copy of the phrase for the regeneration check below.
        let mnemonic = ZeroizeString::from(random_mnemonic().phrase().to_string());
        let request = CreateValidatorsMnemonicRequest {
            mnemonic: mnemonic.clone(),
            key_derivation_path_offset: s.key_derivation_path_offset,
            validators: validators.clone(),
        };
        let response = self
            .client
            .post_lighthouse_validators_mnemonic(&request)
            .await
            .unwrap()
            .data;
        (response, mnemonic)
    } else {
        // Server-generated mnemonic: a derivation offset only makes sense
        // when the test controls the mnemonic.
        assert_eq!(
            s.key_derivation_path_offset, 0,
            "cannot use a derivation offset without specifying a mnemonic"
        );
        let response = self
            .client
            .post_lighthouse_validators(validators.clone())
            .await
            .unwrap()
            .data;
        (response.validators.clone(), response.mnemonic)
    };
    // Counts must reflect the new batch (minus the deliberately disabled ones).
    assert_eq!(response.len(), s.count);
    assert_eq!(self.vals_total(), initial_vals + s.count);
    assert_eq!(
        self.vals_enabled(),
        initial_enabled_vals + s.count - s.disabled.len()
    );
    let server_vals = self.client.get_lighthouse_validators().await.unwrap().data;
    assert_eq!(server_vals.len(), self.vals_total());
    // Ensure the server lists all of these newly created validators.
    for validator in &response {
        assert!(server_vals
            .iter()
            .any(|server_val| server_val.voting_pubkey == validator.voting_pubkey));
    }
    /*
     * Verify that we can regenerate all the keys from the mnemonic.
     */
    let mnemonic = mnemonic_from_phrase(mnemonic.as_str()).unwrap();
    let mut wallet = WalletBuilder::from_mnemonic(&mnemonic, PASSWORD_BYTES, "".to_string())
        .unwrap()
        .build()
        .unwrap();
    // Start deriving at the same offset the server used.
    wallet
        .set_nextaccount(s.key_derivation_path_offset)
        .unwrap();
    for item in response.iter().take(s.count) {
        // Re-derive the next validator's keystores locally...
        let keypairs = wallet
            .next_validator(PASSWORD_BYTES, PASSWORD_BYTES, PASSWORD_BYTES)
            .unwrap();
        let voting_keypair = keypairs.voting.decrypt_keypair(PASSWORD_BYTES).unwrap();
        assert_eq!(
            item.voting_pubkey,
            voting_keypair.pk.clone().into(),
            "the locally generated voting pk should match the server response"
        );
        let withdrawal_keypair = keypairs.withdrawal.decrypt_keypair(PASSWORD_BYTES).unwrap();
        // ...and check the server's eth1 deposit data against them.
        let deposit_bytes = serde_utils::hex::decode(&item.eth1_deposit_tx_data).unwrap();
        let (deposit_data, _) =
            decode_eth1_tx_data(&deposit_bytes, E::default_spec().max_effective_balance)
                .unwrap();
        assert_eq!(
            deposit_data.pubkey,
            voting_keypair.pk.clone().into(),
            "the locally generated voting pk should match the deposit data"
        );
        assert_eq!(
            deposit_data.withdrawal_credentials,
            Hash256::from_slice(&bls::get_withdrawal_credentials(
                &withdrawal_keypair.pk,
                E::default_spec().bls_withdrawal_prefix_byte
            )),
            "the locally generated withdrawal creds should match the deposit data"
        );
        assert_eq!(
            deposit_data.signature,
            deposit_data.create_signature(&voting_keypair.sk, &E::default_spec()),
            "the locally-generated deposit sig should create the same deposit sig"
        );
    }
    self
}
/// Import a single keystore validator via the API. When
/// `s.correct_password` is `false`, a wrong password is submitted and the
/// import is expected to fail; otherwise the import must succeed and the
/// counts/response are verified.
pub async fn create_keystore_validators(self, s: KeystoreValidatorScenario) -> Self {
    let initial_vals = self.vals_total();
    let initial_enabled_vals = self.vals_enabled();
    // Build a fresh keystore encrypted with a random password.
    let password = random_password();
    let keypair = Keypair::random();
    let keystore = KeystoreBuilder::new(&keypair, password.as_bytes(), String::new())
        .unwrap()
        .build()
        .unwrap();
    if !s.correct_password {
        // Submit a different random password than the one the keystore was
        // encrypted with; the server must reject the import.
        let request = KeystoreValidatorsPostRequest {
            enable: s.enabled,
            password: String::from_utf8(random_password().as_ref().to_vec())
                .unwrap()
                .into(),
            keystore,
            graffiti: None,
            suggested_fee_recipient: None,
            gas_limit: None,
            builder_proposals: None,
        };
        self.client
            .post_lighthouse_validators_keystore(&request)
            .await
            .unwrap_err();
        return self;
    }
    // Correct-password path: the import must succeed.
    let request = KeystoreValidatorsPostRequest {
        enable: s.enabled,
        password: String::from_utf8(password.as_ref().to_vec())
            .unwrap()
            .into(),
        keystore,
        graffiti: None,
        suggested_fee_recipient: None,
        gas_limit: None,
        builder_proposals: None,
    };
    let response = self
        .client
        .post_lighthouse_validators_keystore(&request)
        .await
        .unwrap()
        .data;
    // Exactly one validator added; enabled count moves only if requested.
    let num_enabled = s.enabled as usize;
    assert_eq!(self.vals_total(), initial_vals + 1);
    assert_eq!(self.vals_enabled(), initial_enabled_vals + num_enabled);
    let server_vals = self.client.get_lighthouse_validators().await.unwrap().data;
    assert_eq!(server_vals.len(), self.vals_total());
    // The server's response must echo the imported key and enable flag.
    assert_eq!(response.voting_pubkey, keypair.pk.into());
    assert_eq!(response.enabled, s.enabled);
    self
}
/// Registers `s.count` web3signer (remote-signer) validators via the HTTP API
/// and asserts the resulting total / enabled validator counts.
pub async fn create_web3signer_validators(self, s: Web3SignerValidatorScenario) -> Self {
    let vals_before = self.vals_total();
    let enabled_before = self.vals_enabled();

    // Build one request per validator, each with a unique key and signer URL.
    let mut requests = Vec::with_capacity(s.count);
    for i in 0..s.count {
        let keypair = Keypair::random();
        requests.push(Web3SignerValidatorRequest {
            enable: s.enabled,
            description: format!("{}", i),
            graffiti: None,
            suggested_fee_recipient: None,
            gas_limit: None,
            builder_proposals: None,
            voting_public_key: keypair.pk,
            url: format!("http://signer_{}.com/", i),
            root_certificate_path: None,
            request_timeout_ms: None,
            client_identity_path: None,
            client_identity_password: None,
        });
    }

    self.client
        .post_lighthouse_validators_web3signer(&requests)
        .await
        .unwrap();

    assert_eq!(self.vals_total(), vals_before + s.count);
    // Disabled validators are registered but must not raise the enabled count.
    let expected_enabled = if s.enabled {
        enabled_before + s.count
    } else {
        enabled_before
    };
    assert_eq!(self.vals_enabled(), expected_enabled);
    self
}
/// Toggles the enabled state of the validator at `index` via a PATCH request,
/// then asserts the new state is visible in the in-memory store, the listing
/// endpoint and the single-validator endpoint.
pub async fn set_validator_enabled(self, index: usize, enabled: bool) -> Self {
    let listing = self.client.get_lighthouse_validators().await.unwrap().data;
    let pubkey = listing[index].voting_pubkey.clone();

    self.client
        .patch_lighthouse_validators(&pubkey, Some(enabled), None, None, None)
        .await
        .unwrap();

    // The in-memory validator set must reflect the change immediately.
    let decompressed = pubkey.decompress().unwrap();
    assert_eq!(
        self.initialized_validators
            .read()
            .is_enabled(&decompressed)
            .unwrap(),
        enabled
    );

    // The listing endpoint must agree.
    let refreshed = self.client.get_lighthouse_validators().await.unwrap().data;
    assert!(refreshed
        .into_iter()
        .find(|v| v.voting_pubkey == pubkey)
        .map(|v| v.enabled == enabled)
        .unwrap());

    // Check the server via an individual request.
    let single = self
        .client
        .get_lighthouse_validators_pubkey(&pubkey)
        .await
        .unwrap()
        .unwrap()
        .data;
    assert_eq!(single.enabled, enabled);

    self
}
/// Sets the gas limit of the validator at `index` via a PATCH request.
pub async fn set_gas_limit(self, index: usize, gas_limit: u64) -> Self {
    let pubkey = self.client.get_lighthouse_validators().await.unwrap().data[index]
        .voting_pubkey
        .clone();
    self.client
        .patch_lighthouse_validators(&pubkey, None, Some(gas_limit), None, None)
        .await
        .unwrap();
    self
}
/// Asserts that the validator store reports `gas_limit` for the validator at
/// `index`.
pub async fn assert_gas_limit(self, index: usize, gas_limit: u64) -> Self {
    let listing = self.client.get_lighthouse_validators().await.unwrap().data;
    let actual = self.validator_store.get_gas_limit(&listing[index].voting_pubkey);
    assert_eq!(actual, gas_limit);
    self
}
/// Sets the builder-proposals flag of the validator at `index` via a PATCH
/// request.
pub async fn set_builder_proposals(self, index: usize, builder_proposals: bool) -> Self {
    let pubkey = self.client.get_lighthouse_validators().await.unwrap().data[index]
        .voting_pubkey
        .clone();
    self.client
        .patch_lighthouse_validators(&pubkey, None, None, Some(builder_proposals), None)
        .await
        .unwrap();
    self
}
/// Asserts that the validator store reports `builder_proposals` for the
/// validator at `index`.
pub async fn assert_builder_proposals(self, index: usize, builder_proposals: bool) -> Self {
    let listing = self.client.get_lighthouse_validators().await.unwrap().data;
    let actual = self
        .validator_store
        .get_builder_proposals(&listing[index].voting_pubkey);
    assert_eq!(actual, builder_proposals);
    self
}
}

View File

@@ -31,10 +31,8 @@ use std::net::{IpAddr, Ipv4Addr};
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use task_executor::TaskExecutor;
use task_executor::test_utils::TestRuntime;
use tempfile::{tempdir, TempDir};
use tokio::runtime::Runtime;
use tokio::sync::oneshot;
use types::graffiti::GraffitiString;
const PASSWORD_BYTES: &[u8] = &[42, 50, 37];
@@ -48,23 +46,12 @@ struct ApiTester {
validator_store: Arc<ValidatorStore<TestingSlotClock, E>>,
url: SensitiveUrl,
slot_clock: TestingSlotClock,
_server_shutdown: oneshot::Sender<()>,
_validator_dir: TempDir,
_runtime_shutdown: exit_future::Signal,
}
// Builds a runtime to be used in the testing configuration.
fn build_runtime() -> Arc<Runtime> {
Arc::new(
tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.expect("Should be able to build a testing runtime"),
)
_test_runtime: TestRuntime,
}
impl ApiTester {
pub async fn new(runtime: std::sync::Weak<Runtime>) -> Self {
pub async fn new() -> Self {
let log = test_logger();
let validator_dir = tempdir().unwrap();
@@ -100,9 +87,7 @@ impl ApiTester {
Duration::from_secs(1),
);
let (runtime_shutdown, exit) = exit_future::signal();
let (shutdown_tx, _) = futures::channel::mpsc::channel(1);
let executor = TaskExecutor::new(runtime.clone(), exit, log.clone(), shutdown_tx);
let test_runtime = TestRuntime::default();
let validator_store = Arc::new(ValidatorStore::<_, E>::new(
initialized_validators,
@@ -112,7 +97,7 @@ impl ApiTester {
Some(Arc::new(DoppelgangerService::new(log.clone()))),
slot_clock.clone(),
&config,
executor.clone(),
test_runtime.task_executor.clone(),
log.clone(),
));
@@ -123,9 +108,10 @@ impl ApiTester {
let initialized_validators = validator_store.initialized_validators();
let context = Arc::new(Context {
task_executor: executor,
task_executor: test_runtime.task_executor.clone(),
api_secret,
validator_dir: Some(validator_dir.path().into()),
secrets_dir: Some(secrets_dir.path().into()),
validator_store: Some(validator_store.clone()),
graffiti_file: None,
graffiti_flag: Some(Graffiti::default()),
@@ -135,6 +121,8 @@ impl ApiTester {
listen_addr: IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
listen_port: 0,
allow_origin: None,
allow_keystore_export: true,
store_passwords_in_secrets_dir: false,
},
sse_logging_components: None,
log,
@@ -142,12 +130,8 @@ impl ApiTester {
_phantom: PhantomData,
});
let ctx = context.clone();
let (shutdown_tx, shutdown_rx) = oneshot::channel();
let server_shutdown = async {
// It's not really interesting why this triggered, just that it happened.
let _ = shutdown_rx.await;
};
let (listening_socket, server) = super::serve(ctx, server_shutdown).unwrap();
let (listening_socket, server) =
super::serve(ctx, test_runtime.task_executor.exit()).unwrap();
tokio::spawn(async { server.await });
@@ -166,9 +150,8 @@ impl ApiTester {
validator_store,
url,
slot_clock,
_server_shutdown: shutdown_tx,
_validator_dir: validator_dir,
_runtime_shutdown: runtime_shutdown,
_test_runtime: test_runtime,
}
}
@@ -676,387 +659,341 @@ struct Web3SignerValidatorScenario {
enabled: bool,
}
#[test]
fn invalid_pubkey() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.invalidate_api_token()
.test_get_lighthouse_version_invalid()
.await;
});
#[tokio::test]
async fn invalid_pubkey() {
ApiTester::new()
.await
.invalidate_api_token()
.test_get_lighthouse_version_invalid()
.await;
}
#[test]
fn routes_with_invalid_auth() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.test_with_invalid_auth(|client| async move { client.get_lighthouse_version().await })
.await
.test_with_invalid_auth(|client| async move { client.get_lighthouse_health().await })
.await
.test_with_invalid_auth(|client| async move {
client.get_lighthouse_spec::<types::Config>().await
})
.await
.test_with_invalid_auth(
|client| async move { client.get_lighthouse_validators().await },
)
.await
.test_with_invalid_auth(|client| async move {
client
.get_lighthouse_validators_pubkey(&PublicKeyBytes::empty())
.await
})
.await
.test_with_invalid_auth(|client| async move {
client
.post_lighthouse_validators(vec![ValidatorRequest {
enable: <_>::default(),
description: <_>::default(),
graffiti: <_>::default(),
suggested_fee_recipient: <_>::default(),
gas_limit: <_>::default(),
builder_proposals: <_>::default(),
deposit_gwei: <_>::default(),
}])
.await
})
.await
.test_with_invalid_auth(|client| async move {
client
.post_lighthouse_validators_mnemonic(&CreateValidatorsMnemonicRequest {
mnemonic: String::default().into(),
key_derivation_path_offset: <_>::default(),
validators: <_>::default(),
})
.await
})
.await
.test_with_invalid_auth(|client| async move {
let password = random_password();
let keypair = Keypair::random();
let keystore = KeystoreBuilder::new(&keypair, password.as_bytes(), String::new())
.unwrap()
.build()
.unwrap();
client
.post_lighthouse_validators_keystore(&KeystoreValidatorsPostRequest {
password: String::default().into(),
enable: <_>::default(),
keystore,
graffiti: <_>::default(),
suggested_fee_recipient: <_>::default(),
gas_limit: <_>::default(),
builder_proposals: <_>::default(),
})
.await
})
.await
.test_with_invalid_auth(|client| async move {
client
.patch_lighthouse_validators(
&PublicKeyBytes::empty(),
Some(false),
None,
None,
None,
)
.await
})
.await
.test_with_invalid_auth(|client| async move { client.get_keystores().await })
.await
.test_with_invalid_auth(|client| async move {
let password = random_password_string();
let keypair = Keypair::random();
let keystore = KeystoreBuilder::new(&keypair, password.as_ref(), String::new())
.unwrap()
.build()
.map(KeystoreJsonStr)
.unwrap();
client
.post_keystores(&ImportKeystoresRequest {
keystores: vec![keystore],
passwords: vec![password],
slashing_protection: None,
})
.await
})
.await
.test_with_invalid_auth(|client| async move {
let keypair = Keypair::random();
client
.delete_keystores(&DeleteKeystoresRequest {
pubkeys: vec![keypair.pk.compress()],
})
.await
})
.await
});
#[tokio::test]
async fn routes_with_invalid_auth() {
ApiTester::new()
.await
.test_with_invalid_auth(|client| async move { client.get_lighthouse_version().await })
.await
.test_with_invalid_auth(|client| async move { client.get_lighthouse_health().await })
.await
.test_with_invalid_auth(|client| async move {
client.get_lighthouse_spec::<types::Config>().await
})
.await
.test_with_invalid_auth(|client| async move { client.get_lighthouse_validators().await })
.await
.test_with_invalid_auth(|client| async move {
client
.get_lighthouse_validators_pubkey(&PublicKeyBytes::empty())
.await
})
.await
.test_with_invalid_auth(|client| async move {
client
.post_lighthouse_validators(vec![ValidatorRequest {
enable: <_>::default(),
description: <_>::default(),
graffiti: <_>::default(),
suggested_fee_recipient: <_>::default(),
gas_limit: <_>::default(),
builder_proposals: <_>::default(),
deposit_gwei: <_>::default(),
}])
.await
})
.await
.test_with_invalid_auth(|client| async move {
client
.post_lighthouse_validators_mnemonic(&CreateValidatorsMnemonicRequest {
mnemonic: String::default().into(),
key_derivation_path_offset: <_>::default(),
validators: <_>::default(),
})
.await
})
.await
.test_with_invalid_auth(|client| async move {
let password = random_password();
let keypair = Keypair::random();
let keystore = KeystoreBuilder::new(&keypair, password.as_bytes(), String::new())
.unwrap()
.build()
.unwrap();
client
.post_lighthouse_validators_keystore(&KeystoreValidatorsPostRequest {
password: String::default().into(),
enable: <_>::default(),
keystore,
graffiti: <_>::default(),
suggested_fee_recipient: <_>::default(),
gas_limit: <_>::default(),
builder_proposals: <_>::default(),
})
.await
})
.await
.test_with_invalid_auth(|client| async move {
client
.patch_lighthouse_validators(
&PublicKeyBytes::empty(),
Some(false),
None,
None,
None,
)
.await
})
.await
.test_with_invalid_auth(|client| async move { client.get_keystores().await })
.await
.test_with_invalid_auth(|client| async move {
let password = random_password_string();
let keypair = Keypair::random();
let keystore = KeystoreBuilder::new(&keypair, password.as_ref(), String::new())
.unwrap()
.build()
.map(KeystoreJsonStr)
.unwrap();
client
.post_keystores(&ImportKeystoresRequest {
keystores: vec![keystore],
passwords: vec![password],
slashing_protection: None,
})
.await
})
.await
.test_with_invalid_auth(|client| async move {
let keypair = Keypair::random();
client
.delete_keystores(&DeleteKeystoresRequest {
pubkeys: vec![keypair.pk.compress()],
})
.await
})
.await;
}
#[test]
fn simple_getters() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.test_get_lighthouse_version()
.await
.test_get_lighthouse_health()
.await
.test_get_lighthouse_spec()
.await;
});
#[tokio::test]
async fn simple_getters() {
ApiTester::new()
.await
.test_get_lighthouse_version()
.await
.test_get_lighthouse_health()
.await
.test_get_lighthouse_spec()
.await;
}
#[test]
fn hd_validator_creation() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.assert_enabled_validators_count(0)
.assert_validators_count(0)
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: true,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.create_hd_validators(HdValidatorScenario {
count: 1,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![0],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(3)
.create_hd_validators(HdValidatorScenario {
count: 0,
specify_mnemonic: true,
key_derivation_path_offset: 4,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(3);
});
#[tokio::test]
async fn hd_validator_creation() {
ApiTester::new()
.await
.assert_enabled_validators_count(0)
.assert_validators_count(0)
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: true,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.create_hd_validators(HdValidatorScenario {
count: 1,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![0],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(3)
.create_hd_validators(HdValidatorScenario {
count: 0,
specify_mnemonic: true,
key_derivation_path_offset: 4,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(3);
}
#[test]
fn validator_exit() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.test_sign_voluntary_exits(0, None)
.await
.test_sign_voluntary_exits(0, Some(Epoch::new(256)))
.await;
});
#[tokio::test]
async fn validator_exit() {
ApiTester::new()
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.test_sign_voluntary_exits(0, None)
.await
.test_sign_voluntary_exits(0, Some(Epoch::new(256)))
.await;
}
#[test]
fn validator_enabling() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.set_validator_enabled(0, false)
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2)
.set_validator_enabled(0, true)
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2);
});
#[tokio::test]
async fn validator_enabling() {
ApiTester::new()
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.set_validator_enabled(0, false)
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2)
.set_validator_enabled(0, true)
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2);
}
#[test]
fn validator_gas_limit() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.set_gas_limit(0, 500)
.await
.assert_gas_limit(0, 500)
.await
// Update gas limit while validator is disabled.
.set_validator_enabled(0, false)
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2)
.set_gas_limit(0, 1000)
.await
.set_validator_enabled(0, true)
.await
.assert_enabled_validators_count(2)
.assert_gas_limit(0, 1000)
.await
});
#[tokio::test]
async fn validator_gas_limit() {
ApiTester::new()
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.set_gas_limit(0, 500)
.await
.assert_gas_limit(0, 500)
.await
// Update gas limit while validator is disabled.
.set_validator_enabled(0, false)
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2)
.set_gas_limit(0, 1000)
.await
.set_validator_enabled(0, true)
.await
.assert_enabled_validators_count(2)
.assert_gas_limit(0, 1000)
.await;
}
#[test]
fn validator_builder_proposals() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.set_builder_proposals(0, true)
.await
// Test setting builder proposals while the validator is disabled
.set_validator_enabled(0, false)
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2)
.set_builder_proposals(0, false)
.await
.set_validator_enabled(0, true)
.await
.assert_enabled_validators_count(2)
.assert_builder_proposals(0, false)
.await
});
#[tokio::test]
async fn validator_builder_proposals() {
ApiTester::new()
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.set_builder_proposals(0, true)
.await
// Test setting builder proposals while the validator is disabled
.set_validator_enabled(0, false)
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2)
.set_builder_proposals(0, false)
.await
.set_validator_enabled(0, true)
.await
.assert_enabled_validators_count(2)
.assert_builder_proposals(0, false)
.await;
}
#[test]
fn validator_graffiti() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.set_graffiti(0, "Mr F was here")
.await
.assert_graffiti(0, "Mr F was here")
.await
// Test setting graffiti while the validator is disabled
.set_validator_enabled(0, false)
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2)
.set_graffiti(0, "Mr F was here again")
.await
.set_validator_enabled(0, true)
.await
.assert_enabled_validators_count(2)
.assert_graffiti(0, "Mr F was here again")
.await
});
#[tokio::test]
async fn validator_graffiti() {
ApiTester::new()
.await
.create_hd_validators(HdValidatorScenario {
count: 2,
specify_mnemonic: false,
key_derivation_path_offset: 0,
disabled: vec![],
})
.await
.assert_enabled_validators_count(2)
.assert_validators_count(2)
.set_graffiti(0, "Mr F was here")
.await
.assert_graffiti(0, "Mr F was here")
.await
// Test setting graffiti while the validator is disabled
.set_validator_enabled(0, false)
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2)
.set_graffiti(0, "Mr F was here again")
.await
.set_validator_enabled(0, true)
.await
.assert_enabled_validators_count(2)
.assert_graffiti(0, "Mr F was here again")
.await;
}
#[test]
fn keystore_validator_creation() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.assert_enabled_validators_count(0)
.assert_validators_count(0)
.create_keystore_validators(KeystoreValidatorScenario {
correct_password: true,
enabled: true,
})
.await
.assert_enabled_validators_count(1)
.assert_validators_count(1)
.create_keystore_validators(KeystoreValidatorScenario {
correct_password: false,
enabled: true,
})
.await
.assert_enabled_validators_count(1)
.assert_validators_count(1)
.create_keystore_validators(KeystoreValidatorScenario {
correct_password: true,
enabled: false,
})
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2);
});
#[tokio::test]
async fn keystore_validator_creation() {
ApiTester::new()
.await
.assert_enabled_validators_count(0)
.assert_validators_count(0)
.create_keystore_validators(KeystoreValidatorScenario {
correct_password: true,
enabled: true,
})
.await
.assert_enabled_validators_count(1)
.assert_validators_count(1)
.create_keystore_validators(KeystoreValidatorScenario {
correct_password: false,
enabled: true,
})
.await
.assert_enabled_validators_count(1)
.assert_validators_count(1)
.create_keystore_validators(KeystoreValidatorScenario {
correct_password: true,
enabled: false,
})
.await
.assert_enabled_validators_count(1)
.assert_validators_count(2);
}
#[test]
fn web3signer_validator_creation() {
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
ApiTester::new(weak_runtime)
.await
.assert_enabled_validators_count(0)
.assert_validators_count(0)
.create_web3signer_validators(Web3SignerValidatorScenario {
count: 1,
enabled: true,
})
.await
.assert_enabled_validators_count(1)
.assert_validators_count(1);
});
#[tokio::test]
async fn web3signer_validator_creation() {
ApiTester::new()
.await
.assert_enabled_validators_count(0)
.assert_validators_count(0)
.create_web3signer_validators(Web3SignerValidatorScenario {
count: 1,
enabled: true,
})
.await
.assert_enabled_validators_count(1)
.assert_validators_count(1);
}

View File

@@ -12,6 +12,7 @@ use itertools::Itertools;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use slashing_protection::interchange::{Interchange, InterchangeMetadata};
use std::{collections::HashMap, path::Path};
use tokio::runtime::Handle;
use types::Address;
fn new_keystore(password: ZeroizeString) -> Keystore {
@@ -64,31 +65,23 @@ fn remotekey_validator_with_pubkey(pubkey: PublicKey) -> SingleImportRemotekeysR
}
}
fn run_test<F, V>(f: F)
async fn run_test<F, V>(f: F)
where
F: FnOnce(ApiTester) -> V,
V: Future<Output = ()>,
{
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
let tester = ApiTester::new(weak_runtime).await;
f(tester).await
});
let tester = ApiTester::new().await;
f(tester).await
}
fn run_dual_vc_test<F, V>(f: F)
async fn run_dual_vc_test<F, V>(f: F)
where
F: FnOnce(ApiTester, ApiTester) -> V,
V: Future<Output = ()>,
{
let runtime = build_runtime();
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
let tester1 = ApiTester::new(weak_runtime.clone()).await;
let tester2 = ApiTester::new(weak_runtime).await;
f(tester1, tester2).await
});
let tester1 = ApiTester::new().await;
let tester2 = ApiTester::new().await;
f(tester1, tester2).await
}
fn keystore_pubkey(keystore: &Keystore) -> PublicKeyBytes {
@@ -199,8 +192,8 @@ fn check_remotekey_delete_response(
}
}
#[test]
fn get_auth_no_token() {
#[tokio::test]
async fn get_auth_no_token() {
run_test(|mut tester| async move {
let _ = &tester;
tester.client.send_authorization_header(false);
@@ -213,19 +206,21 @@ fn get_auth_no_token() {
// The token should match the one that the client was originally initialised with.
assert!(tester.client.api_token() == Some(&token));
})
.await;
}
#[test]
fn get_empty_keystores() {
#[tokio::test]
async fn get_empty_keystores() {
run_test(|tester| async move {
let _ = &tester;
let res = tester.client.get_keystores().await.unwrap();
assert_eq!(res, ListKeystoresResponse { data: vec![] });
})
.await;
}
#[test]
fn import_new_keystores() {
#[tokio::test]
async fn import_new_keystores() {
run_test(|tester| async move {
let _ = &tester;
let password = random_password_string();
@@ -250,10 +245,11 @@ fn import_new_keystores() {
let get_res = tester.client.get_keystores().await.unwrap();
check_keystore_get_response(&get_res, &keystores);
})
.await;
}
#[test]
fn import_only_duplicate_keystores() {
#[tokio::test]
async fn import_only_duplicate_keystores() {
run_test(|tester| async move {
let _ = &tester;
let password = random_password_string();
@@ -279,10 +275,11 @@ fn import_only_duplicate_keystores() {
let get_res = tester.client.get_keystores().await.unwrap();
check_keystore_get_response(&get_res, &keystores);
})
.await;
}
#[test]
fn import_some_duplicate_keystores() {
#[tokio::test]
async fn import_some_duplicate_keystores() {
run_test(|tester| async move {
let _ = &tester;
let password = random_password_string();
@@ -330,10 +327,11 @@ fn import_some_duplicate_keystores() {
let import_res = tester.client.post_keystores(&req2).await.unwrap();
check_keystore_import_response(&import_res, expected);
})
.await;
}
#[test]
fn import_wrong_number_of_passwords() {
#[tokio::test]
async fn import_wrong_number_of_passwords() {
run_test(|tester| async move {
let _ = &tester;
let password = random_password_string();
@@ -352,10 +350,11 @@ fn import_wrong_number_of_passwords() {
.unwrap_err();
assert_eq!(err.status().unwrap(), 400);
})
.await;
}
#[test]
fn get_web3_signer_keystores() {
#[tokio::test]
async fn get_web3_signer_keystores() {
run_test(|tester| async move {
let _ = &tester;
let num_local = 3;
@@ -412,10 +411,11 @@ fn get_web3_signer_keystores() {
assert!(get_res.data.contains(&response), "{:?}", response);
}
})
.await;
}
#[test]
fn import_and_delete_conflicting_web3_signer_keystores() {
#[tokio::test]
async fn import_and_delete_conflicting_web3_signer_keystores() {
run_test(|tester| async move {
let _ = &tester;
let num_keystores = 3;
@@ -477,10 +477,11 @@ fn import_and_delete_conflicting_web3_signer_keystores() {
let delete_res = tester.client.delete_keystores(&delete_req).await.unwrap();
check_keystore_delete_response(&delete_res, all_delete_error(keystores.len()));
})
.await;
}
#[test]
fn import_keystores_wrong_password() {
#[tokio::test]
async fn import_keystores_wrong_password() {
run_test(|tester| async move {
let _ = &tester;
let num_keystores = 4;
@@ -551,11 +552,12 @@ fn import_keystores_wrong_password() {
&import_res,
(0..num_keystores).map(|_| ImportKeystoreStatus::Duplicate),
);
});
})
.await;
}
#[test]
fn import_invalid_slashing_protection() {
#[tokio::test]
async fn import_invalid_slashing_protection() {
run_test(|tester| async move {
let _ = &tester;
let password = random_password_string();
@@ -589,10 +591,11 @@ fn import_invalid_slashing_protection() {
let get_res = tester.client.get_keystores().await.unwrap();
check_keystore_get_response(&get_res, &[]);
})
.await;
}
#[test]
fn check_get_set_fee_recipient() {
#[tokio::test]
async fn check_get_set_fee_recipient() {
run_test(|tester: ApiTester| async move {
let _ = &tester;
let password = random_password_string();
@@ -768,10 +771,11 @@ fn check_get_set_fee_recipient() {
);
}
})
.await;
}
#[test]
fn check_get_set_gas_limit() {
#[tokio::test]
async fn check_get_set_gas_limit() {
run_test(|tester: ApiTester| async move {
let _ = &tester;
let password = random_password_string();
@@ -943,14 +947,15 @@ fn check_get_set_gas_limit() {
);
}
})
.await
}
fn all_indices(count: usize) -> Vec<usize> {
(0..count).collect()
}
#[test]
fn migrate_all_with_slashing_protection() {
#[tokio::test]
async fn migrate_all_with_slashing_protection() {
let n = 3;
generic_migration_test(
n,
@@ -967,11 +972,12 @@ fn migrate_all_with_slashing_protection() {
(1, make_attestation(2, 3), false),
(2, make_attestation(1, 2), false),
],
);
)
.await;
}
#[test]
fn migrate_some_with_slashing_protection() {
#[tokio::test]
async fn migrate_some_with_slashing_protection() {
let n = 3;
generic_migration_test(
n,
@@ -989,11 +995,12 @@ fn migrate_some_with_slashing_protection() {
(0, make_attestation(2, 3), true),
(1, make_attestation(3, 4), true),
],
);
)
.await;
}
#[test]
fn migrate_some_missing_slashing_protection() {
#[tokio::test]
async fn migrate_some_missing_slashing_protection() {
let n = 3;
generic_migration_test(
n,
@@ -1010,11 +1017,12 @@ fn migrate_some_missing_slashing_protection() {
(1, make_attestation(2, 3), true),
(0, make_attestation(2, 3), true),
],
);
)
.await;
}
#[test]
fn migrate_some_extra_slashing_protection() {
#[tokio::test]
async fn migrate_some_extra_slashing_protection() {
let n = 3;
generic_migration_test(
n,
@@ -1033,7 +1041,8 @@ fn migrate_some_extra_slashing_protection() {
(1, make_attestation(3, 4), true),
(2, make_attestation(2, 3), false),
],
);
)
.await;
}
/// Run a test that creates some validators on one VC, and then migrates them to a second VC.
@@ -1051,7 +1060,7 @@ fn migrate_some_extra_slashing_protection() {
/// - `import_indices`: validators to transfer. It needn't be a subset of `delete_indices`.
/// - `second_vc_attestations`: attestations to sign on the second VC after the transfer. The bool
/// indicates whether the signing should be successful.
fn generic_migration_test(
async fn generic_migration_test(
num_validators: usize,
first_vc_attestations: Vec<(usize, Attestation<E>)>,
delete_indices: Vec<usize>,
@@ -1169,11 +1178,12 @@ fn generic_migration_test(
Err(e) => assert!(!should_succeed, "{:?}", e),
}
}
});
})
.await
}
#[test]
fn delete_keystores_twice() {
#[tokio::test]
async fn delete_keystores_twice() {
run_test(|tester| async move {
let _ = &tester;
let password = random_password_string();
@@ -1201,10 +1211,11 @@ fn delete_keystores_twice() {
let delete_res = tester.client.delete_keystores(&delete_req).await.unwrap();
check_keystore_delete_response(&delete_res, all_not_active(keystores.len()));
})
.await
}
#[test]
fn delete_nonexistent_keystores() {
#[tokio::test]
async fn delete_nonexistent_keystores() {
run_test(|tester| async move {
let _ = &tester;
let password = random_password_string();
@@ -1219,6 +1230,7 @@ fn delete_nonexistent_keystores() {
let delete_res = tester.client.delete_keystores(&delete_req).await.unwrap();
check_keystore_delete_response(&delete_res, all_not_found(keystores.len()));
})
.await
}
fn make_attestation(source_epoch: u64, target_epoch: u64) -> Attestation<E> {
@@ -1242,9 +1254,9 @@ fn make_attestation(source_epoch: u64, target_epoch: u64) -> Attestation<E> {
}
}
#[test]
fn delete_concurrent_with_signing() {
let runtime = build_runtime();
#[tokio::test]
async fn delete_concurrent_with_signing() {
let handle = Handle::try_current().unwrap();
let num_keys = 8;
let num_signing_threads = 8;
let num_attestations = 100;
@@ -1257,115 +1269,112 @@ fn delete_concurrent_with_signing() {
"num_keys should be divisible by num threads for simplicity"
);
let weak_runtime = Arc::downgrade(&runtime);
runtime.block_on(async {
let tester = ApiTester::new(weak_runtime).await;
let tester = ApiTester::new().await;
// Generate a lot of keys and import them.
let password = random_password_string();
let keystores = (0..num_keys)
.map(|_| new_keystore(password.clone()))
.collect::<Vec<_>>();
let all_pubkeys = keystores.iter().map(keystore_pubkey).collect::<Vec<_>>();
// Generate a lot of keys and import them.
let password = random_password_string();
let keystores = (0..num_keys)
.map(|_| new_keystore(password.clone()))
.collect::<Vec<_>>();
let all_pubkeys = keystores.iter().map(keystore_pubkey).collect::<Vec<_>>();
let import_res = tester
.client
.post_keystores(&ImportKeystoresRequest {
keystores: keystores.clone(),
passwords: vec![password.clone(); keystores.len()],
slashing_protection: None,
})
.await
.unwrap();
check_keystore_import_response(&import_res, all_imported(keystores.len()));
let import_res = tester
.client
.post_keystores(&ImportKeystoresRequest {
keystores: keystores.clone(),
passwords: vec![password.clone(); keystores.len()],
slashing_protection: None,
})
.await
.unwrap();
check_keystore_import_response(&import_res, all_imported(keystores.len()));
// Start several threads signing attestations at sequential epochs.
let mut join_handles = vec![];
// Start several threads signing attestations at sequential epochs.
let mut join_handles = vec![];
for thread_index in 0..num_signing_threads {
let keys_per_thread = num_keys / num_signing_threads;
let validator_store = tester.validator_store.clone();
let thread_pubkeys = all_pubkeys
[thread_index * keys_per_thread..(thread_index + 1) * keys_per_thread]
.to_vec();
for thread_index in 0..num_signing_threads {
let keys_per_thread = num_keys / num_signing_threads;
let validator_store = tester.validator_store.clone();
let thread_pubkeys = all_pubkeys
[thread_index * keys_per_thread..(thread_index + 1) * keys_per_thread]
.to_vec();
let handle = runtime.spawn(async move {
for j in 0..num_attestations {
let mut att = make_attestation(j, j + 1);
for (_validator_id, public_key) in thread_pubkeys.iter().enumerate() {
let _ = validator_store
.sign_attestation(*public_key, 0, &mut att, Epoch::new(j + 1))
.await;
}
let handle = handle.spawn(async move {
for j in 0..num_attestations {
let mut att = make_attestation(j, j + 1);
for (_validator_id, public_key) in thread_pubkeys.iter().enumerate() {
let _ = validator_store
.sign_attestation(*public_key, 0, &mut att, Epoch::new(j + 1))
.await;
}
});
join_handles.push(handle);
}
// Concurrently, delete each validator one at a time. Store the slashing protection
// data so we can ensure it doesn't change after a key is exported.
let mut delete_handles = vec![];
for _ in 0..num_delete_threads {
let client = tester.client.clone();
let all_pubkeys = all_pubkeys.clone();
let handle = runtime.spawn(async move {
let mut rng = SmallRng::from_entropy();
let mut slashing_protection = vec![];
for _ in 0..num_delete_attempts {
let to_delete = all_pubkeys
.iter()
.filter(|_| rng.gen_bool(delete_prob))
.copied()
.collect::<Vec<_>>();
if !to_delete.is_empty() {
let delete_res = client
.delete_keystores(&DeleteKeystoresRequest { pubkeys: to_delete })
.await
.unwrap();
for status in delete_res.data.iter() {
assert_ne!(status.status, DeleteKeystoreStatus::Error);
}
slashing_protection.push(delete_res.slashing_protection);
}
}
slashing_protection
});
delete_handles.push(handle);
}
// Collect slashing protection.
let mut slashing_protection_map = HashMap::new();
let collected_slashing_protection = futures::future::join_all(delete_handles).await;
for interchange in collected_slashing_protection
.into_iter()
.flat_map(Result::unwrap)
{
for validator_data in interchange.data {
slashing_protection_map
.entry(validator_data.pubkey)
.and_modify(|existing| {
assert_eq!(
*existing, validator_data,
"slashing protection data changed after first export"
)
})
.or_insert(validator_data);
}
}
});
join_handles.push(handle);
}
futures::future::join_all(join_handles).await
});
// Concurrently, delete each validator one at a time. Store the slashing protection
// data so we can ensure it doesn't change after a key is exported.
let mut delete_handles = vec![];
for _ in 0..num_delete_threads {
let client = tester.client.clone();
let all_pubkeys = all_pubkeys.clone();
let handle = handle.spawn(async move {
let mut rng = SmallRng::from_entropy();
let mut slashing_protection = vec![];
for _ in 0..num_delete_attempts {
let to_delete = all_pubkeys
.iter()
.filter(|_| rng.gen_bool(delete_prob))
.copied()
.collect::<Vec<_>>();
if !to_delete.is_empty() {
let delete_res = client
.delete_keystores(&DeleteKeystoresRequest { pubkeys: to_delete })
.await
.unwrap();
for status in delete_res.data.iter() {
assert_ne!(status.status, DeleteKeystoreStatus::Error);
}
slashing_protection.push(delete_res.slashing_protection);
}
}
slashing_protection
});
delete_handles.push(handle);
}
// Collect slashing protection.
let mut slashing_protection_map = HashMap::new();
let collected_slashing_protection = futures::future::join_all(delete_handles).await;
for interchange in collected_slashing_protection
.into_iter()
.flat_map(Result::unwrap)
{
for validator_data in interchange.data {
slashing_protection_map
.entry(validator_data.pubkey)
.and_modify(|existing| {
assert_eq!(
*existing, validator_data,
"slashing protection data changed after first export"
)
})
.or_insert(validator_data);
}
}
futures::future::join_all(join_handles).await;
}
#[test]
fn delete_then_reimport() {
#[tokio::test]
async fn delete_then_reimport() {
run_test(|tester| async move {
let _ = &tester;
let password = random_password_string();
@@ -1396,19 +1405,21 @@ fn delete_then_reimport() {
let import_res = tester.client.post_keystores(&import_req).await.unwrap();
check_keystore_import_response(&import_res, all_imported(keystores.len()));
})
.await
}
#[test]
fn get_empty_remotekeys() {
#[tokio::test]
async fn get_empty_remotekeys() {
run_test(|tester| async move {
let _ = &tester;
let res = tester.client.get_remotekeys().await.unwrap();
assert_eq!(res, ListRemotekeysResponse { data: vec![] });
})
.await
}
#[test]
fn import_new_remotekeys() {
#[tokio::test]
async fn import_new_remotekeys() {
run_test(|tester| async move {
let _ = &tester;
@@ -1443,10 +1454,11 @@ fn import_new_remotekeys() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, expected_responses);
})
.await
}
#[test]
fn import_same_remotekey_different_url() {
#[tokio::test]
async fn import_same_remotekey_different_url() {
run_test(|tester| async move {
let _ = &tester;
@@ -1485,10 +1497,11 @@ fn import_same_remotekey_different_url() {
}],
);
})
.await
}
#[test]
fn delete_remotekey_then_reimport_different_url() {
#[tokio::test]
async fn delete_remotekey_then_reimport_different_url() {
run_test(|tester| async move {
let _ = &tester;
@@ -1534,10 +1547,11 @@ fn delete_remotekey_then_reimport_different_url() {
vec![ImportRemotekeyStatus::Imported].into_iter(),
);
})
.await
}
#[test]
fn import_only_duplicate_remotekeys() {
#[tokio::test]
async fn import_only_duplicate_remotekeys() {
run_test(|tester| async move {
let _ = &tester;
let remotekeys = (0..3)
@@ -1582,10 +1596,11 @@ fn import_only_duplicate_remotekeys() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, expected_responses);
})
.await
}
#[test]
fn import_some_duplicate_remotekeys() {
#[tokio::test]
async fn import_some_duplicate_remotekeys() {
run_test(|tester| async move {
let _ = &tester;
let num_remotekeys = 5;
@@ -1649,10 +1664,11 @@ fn import_some_duplicate_remotekeys() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, expected_responses);
})
.await
}
#[test]
fn import_remote_and_local_keys() {
#[tokio::test]
async fn import_remote_and_local_keys() {
run_test(|tester| async move {
let _ = &tester;
let num_local = 3;
@@ -1714,10 +1730,11 @@ fn import_remote_and_local_keys() {
assert!(get_res.data.contains(&response), "{:?}", response);
}
})
.await
}
#[test]
fn import_same_local_and_remote_keys() {
#[tokio::test]
async fn import_same_local_and_remote_keys() {
run_test(|tester| async move {
let _ = &tester;
let num_local = 3;
@@ -1782,9 +1799,10 @@ fn import_same_local_and_remote_keys() {
assert!(get_res.data.contains(&response), "{:?}", response);
}
})
.await
}
#[test]
fn import_same_remote_and_local_keys() {
#[tokio::test]
async fn import_same_remote_and_local_keys() {
run_test(|tester| async move {
let _ = &tester;
let num_local = 3;
@@ -1847,10 +1865,11 @@ fn import_same_remote_and_local_keys() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, expected_responses);
})
.await
}
#[test]
fn delete_remotekeys_twice() {
#[tokio::test]
async fn delete_remotekeys_twice() {
run_test(|tester| async move {
let _ = &tester;
@@ -1893,10 +1912,11 @@ fn delete_remotekeys_twice() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, Vec::new());
})
.await
}
#[test]
fn delete_nonexistent_remotekey() {
#[tokio::test]
async fn delete_nonexistent_remotekey() {
run_test(|tester| async move {
let _ = &tester;
@@ -1919,10 +1939,11 @@ fn delete_nonexistent_remotekey() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, Vec::new());
})
.await
}
#[test]
fn delete_then_reimport_remotekeys() {
#[tokio::test]
async fn delete_then_reimport_remotekeys() {
run_test(|tester| async move {
let _ = &tester;
@@ -1984,10 +2005,11 @@ fn delete_then_reimport_remotekeys() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, expected_responses);
})
.await
}
#[test]
fn import_remotekey_web3signer() {
#[tokio::test]
async fn import_remotekey_web3signer() {
run_test(|tester| async move {
let _ = &tester;
@@ -2043,10 +2065,11 @@ fn import_remotekey_web3signer() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, expected_responses);
})
.await
}
#[test]
fn import_remotekey_web3signer_disabled() {
#[tokio::test]
async fn import_remotekey_web3signer_disabled() {
run_test(|tester| async move {
let _ = &tester;
@@ -2096,10 +2119,11 @@ fn import_remotekey_web3signer_disabled() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, expected_responses);
})
.await
}
#[test]
fn import_remotekey_web3signer_enabled() {
#[tokio::test]
async fn import_remotekey_web3signer_enabled() {
run_test(|tester| async move {
let _ = &tester;
@@ -2156,4 +2180,5 @@ fn import_remotekey_web3signer_enabled() {
let get_res = tester.client.get_remotekeys().await.unwrap();
check_remotekey_get_response(&get_res, expected_responses);
})
.await
}