mirror of
https://github.com/sigp/lighthouse.git
synced 2026-03-21 13:54:44 +00:00
Start adding validator move
This commit is contained in:
@@ -1,8 +1,12 @@
|
||||
use account_utils::ZeroizeString;
|
||||
use eth2::lighthouse_vc::std_types::{InterchangeJsonStr, KeystoreJsonStr};
|
||||
use eth2::SensitiveUrl;
|
||||
use eth2::{
|
||||
lighthouse_vc::{http_client::ValidatorClientHttpClient, std_types::SingleKeystoreResponse},
|
||||
SensitiveUrl,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tree_hash::TreeHash;
|
||||
use types::*;
|
||||
|
||||
@@ -189,3 +193,35 @@ mod bytes_4_without_0x_prefix {
|
||||
Ok(array)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn vc_http_client<P: AsRef<Path>>(
|
||||
url: SensitiveUrl,
|
||||
token_path: P,
|
||||
) -> Result<(ValidatorClientHttpClient, Vec<SingleKeystoreResponse>), String> {
|
||||
let token_path = token_path.as_ref();
|
||||
let token_bytes =
|
||||
fs::read(&token_path).map_err(|e| format!("Failed to read {:?}: {:?}", token_path, e))?;
|
||||
let token_string = String::from_utf8(token_bytes)
|
||||
.map_err(|e| format!("Failed to parse {:?} as utf8: {:?}", token_path, e))?;
|
||||
let http_client = ValidatorClientHttpClient::new(url.clone(), token_string).map_err(|e| {
|
||||
format!(
|
||||
"Could not instantiate HTTP client from URL and secret: {:?}",
|
||||
e
|
||||
)
|
||||
})?;
|
||||
|
||||
// Perform a request to check that the connection works
|
||||
let remote_keystores = http_client
|
||||
.get_keystores()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to list keystores on VC: {:?}", e))?
|
||||
.data;
|
||||
|
||||
eprintln!(
|
||||
"Validator client is reachable at {} and reports {} validators",
|
||||
url,
|
||||
remote_keystores.len()
|
||||
);
|
||||
|
||||
Ok((http_client, remote_keystores))
|
||||
}
|
||||
|
||||
@@ -2,10 +2,7 @@ use super::common::*;
|
||||
use crate::DumpConfig;
|
||||
use clap::{App, Arg, ArgMatches};
|
||||
use eth2::{
|
||||
lighthouse_vc::{
|
||||
http_client::ValidatorClientHttpClient, std_types::ImportKeystoresRequest,
|
||||
types::UpdateFeeRecipientRequest,
|
||||
},
|
||||
lighthouse_vc::{std_types::ImportKeystoresRequest, types::UpdateFeeRecipientRequest},
|
||||
SensitiveUrl,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -132,32 +129,7 @@ async fn run<'a>(config: ImportConfig) -> Result<(), String> {
|
||||
|
||||
let count = validators.len();
|
||||
|
||||
let http_client = {
|
||||
let token_bytes = fs::read(&vc_token_path)
|
||||
.map_err(|e| format!("Failed to read {:?}: {:?}", vc_token_path, e))?;
|
||||
let token_string = String::from_utf8(token_bytes)
|
||||
.map_err(|e| format!("Failed to parse {:?} as utf8: {:?}", vc_token_path, e))?;
|
||||
let http_client =
|
||||
ValidatorClientHttpClient::new(vc_url.clone(), token_string).map_err(|e| {
|
||||
format!(
|
||||
"Could not instantiate HTTP client from URL and secret: {:?}",
|
||||
e
|
||||
)
|
||||
})?;
|
||||
|
||||
// Perform a request to check that the connection works
|
||||
let remote_keystores = http_client
|
||||
.get_keystores()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to list keystores on VC: {:?}", e))?;
|
||||
eprintln!(
|
||||
"Validator client is reachable at {} and reports {} validators",
|
||||
vc_url,
|
||||
remote_keystores.data.len()
|
||||
);
|
||||
|
||||
http_client
|
||||
};
|
||||
let (http_client, _keystores) = vc_http_client(vc_url.clone(), &vc_token_path).await?;
|
||||
|
||||
eprintln!(
|
||||
"Starting to submit validators {} to VC, each validator may take several seconds",
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
pub mod common;
|
||||
pub mod create_validators;
|
||||
pub mod import_validators;
|
||||
pub mod move_validators;
|
||||
|
||||
use crate::DumpConfig;
|
||||
use clap::{App, ArgMatches};
|
||||
|
||||
473
validator_manager/src/validators/move_validators.rs
Normal file
473
validator_manager/src/validators/move_validators.rs
Normal file
@@ -0,0 +1,473 @@
|
||||
use super::common::*;
|
||||
use crate::DumpConfig;
|
||||
use clap::{App, Arg, ArgMatches};
|
||||
use eth2::{
|
||||
lighthouse_vc::{
|
||||
http_client::ValidatorClientHttpClient,
|
||||
std_types::{DeleteKeystoresRequest, ImportKeystoresRequest},
|
||||
types::UpdateFeeRecipientRequest,
|
||||
},
|
||||
SensitiveUrl,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use types::PublicKeyBytes;
|
||||
|
||||
/// Name of the sub-directory created inside the user-supplied working directory. Creating
/// it proves the directory is writable and detects concurrent or previously-failed runs.
pub const MOVE_DIR_NAME: &str = "lighthouse-validator-move";

/// The name of the `move` sub-command.
pub const CMD: &str = "move";

// CLI flag names for the `move` sub-command.
pub const WORKING_DIRECTORY_FLAG: &str = "working-directory";
pub const SRC_VALIDATOR_CLIENT_URL_FLAG: &str = "src-validator-client-url";
pub const SRC_VALIDATOR_CLIENT_TOKEN_FLAG: &str = "src-validator-client-token";
pub const DEST_VALIDATOR_CLIENT_URL_FLAG: &str = "dest-validator-client-url";
pub const DEST_VALIDATOR_CLIENT_TOKEN_FLAG: &str = "dest-validator-client-token";
pub const VALIDATORS_FLAG: &str = "validators";
|
||||
|
||||
pub fn cli_app<'a, 'b>() -> App<'a, 'b> {
|
||||
App::new(CMD)
|
||||
.about(
|
||||
"Uploads validators to a validator client using the HTTP API. The validators \
|
||||
are defined in a JSON file which can be generated using the \"create-validators\" \
|
||||
command.",
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(WORKING_DIRECTORY_FLAG)
|
||||
.long(WORKING_DIRECTORY_FLAG)
|
||||
.value_name("PATH_TO_DIRECTORY")
|
||||
.help(
|
||||
"The path to a directory where the application can write files.\
|
||||
Under certain failure scenarios this directory may contain files which \
|
||||
can be used to recover validators.",
|
||||
)
|
||||
.required(true)
|
||||
.takes_value(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(SRC_VALIDATOR_CLIENT_URL_FLAG)
|
||||
.long(SRC_VALIDATOR_CLIENT_URL_FLAG)
|
||||
.value_name("HTTP_ADDRESS")
|
||||
.help(
|
||||
"A HTTP(S) address of a validator client using the keymanager-API. \
|
||||
This validator client is the \"source\" and contains the validators \
|
||||
that are to be moved.",
|
||||
)
|
||||
.required(true)
|
||||
.requires(SRC_VALIDATOR_CLIENT_TOKEN_FLAG)
|
||||
.takes_value(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(SRC_VALIDATOR_CLIENT_TOKEN_FLAG)
|
||||
.long(SRC_VALIDATOR_CLIENT_TOKEN_FLAG)
|
||||
.value_name("PATH")
|
||||
.help("The file containing a token required by the source validator client.")
|
||||
.takes_value(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(DEST_VALIDATOR_CLIENT_URL_FLAG)
|
||||
.long(DEST_VALIDATOR_CLIENT_URL_FLAG)
|
||||
.value_name("HTTP_ADDRESS")
|
||||
.help(
|
||||
"A HTTP(S) address of a validator client using the keymanager-API. \
|
||||
This validator client is the \"destination\" and will have new validators \
|
||||
added as they are removed from the \"source\" validator client.",
|
||||
)
|
||||
.required(true)
|
||||
.requires(DEST_VALIDATOR_CLIENT_TOKEN_FLAG)
|
||||
.takes_value(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(DEST_VALIDATOR_CLIENT_TOKEN_FLAG)
|
||||
.long(DEST_VALIDATOR_CLIENT_TOKEN_FLAG)
|
||||
.value_name("PATH")
|
||||
.help("The file containing a token required by the destination validator client.")
|
||||
.takes_value(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name(VALIDATORS_FLAG)
|
||||
.long(VALIDATORS_FLAG)
|
||||
.value_name("STRING")
|
||||
.help(
|
||||
"One or more validator public keys (as 0x-prefixed hex) to be moved from \
|
||||
the source to destination validator clients. Alternatively, use \"all\" to \
|
||||
move all the validators from the source validator client.",
|
||||
)
|
||||
.required(true)
|
||||
.takes_value(true),
|
||||
)
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]
|
||||
pub enum Validators {
|
||||
All,
|
||||
Some(Vec<PublicKeyBytes>),
|
||||
}
|
||||
|
||||
impl FromStr for Validators {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"all" => Ok(Validators::All),
|
||||
other => other
|
||||
.split(',')
|
||||
.map(PublicKeyBytes::from_str)
|
||||
.collect::<Result<_, _>>()
|
||||
.map(Validators::Some),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]
|
||||
pub struct MoveConfig {
|
||||
pub working_directory_path: PathBuf,
|
||||
pub src_vc_url: SensitiveUrl,
|
||||
pub src_vc_token_path: PathBuf,
|
||||
pub dest_vc_url: SensitiveUrl,
|
||||
pub dest_vc_token_path: PathBuf,
|
||||
pub validators: Validators,
|
||||
}
|
||||
|
||||
impl MoveConfig {
|
||||
fn from_cli(matches: &ArgMatches) -> Result<Self, String> {
|
||||
Ok(Self {
|
||||
working_directory_path: clap_utils::parse_required(matches, WORKING_DIRECTORY_FLAG)?,
|
||||
src_vc_url: clap_utils::parse_required(matches, SRC_VALIDATOR_CLIENT_URL_FLAG)?,
|
||||
src_vc_token_path: clap_utils::parse_required(
|
||||
matches,
|
||||
SRC_VALIDATOR_CLIENT_TOKEN_FLAG,
|
||||
)?,
|
||||
dest_vc_url: clap_utils::parse_required(matches, DEST_VALIDATOR_CLIENT_URL_FLAG)?,
|
||||
dest_vc_token_path: clap_utils::parse_required(
|
||||
matches,
|
||||
DEST_VALIDATOR_CLIENT_TOKEN_FLAG,
|
||||
)?,
|
||||
validators: clap_utils::parse_required(matches, VALIDATORS_FLAG)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn cli_run<'a>(
|
||||
matches: &'a ArgMatches<'a>,
|
||||
dump_config: DumpConfig,
|
||||
) -> Result<(), String> {
|
||||
let config = MoveConfig::from_cli(matches)?;
|
||||
if dump_config.should_exit_early(&config)? {
|
||||
Ok(())
|
||||
} else {
|
||||
run(config).await
|
||||
}
|
||||
}
|
||||
|
||||
async fn run<'a>(config: MoveConfig) -> Result<(), String> {
|
||||
let MoveConfig {
|
||||
working_directory_path,
|
||||
src_vc_url,
|
||||
src_vc_token_path,
|
||||
dest_vc_url,
|
||||
dest_vc_token_path,
|
||||
validators,
|
||||
} = config;
|
||||
|
||||
if !working_directory_path.exists() {
|
||||
return Err(format!("{:?} does not exist", working_directory_path));
|
||||
}
|
||||
|
||||
// Append another directory to the "working directory" provided by the user. By creating a new
|
||||
// directory we can prove (to some degree) that we can write in the given directory.
|
||||
//
|
||||
// It also allows us to easily detect when another identical process is running or the previous
|
||||
// run failed by checking to see if the directory already exists.
|
||||
let working_directory_path = working_directory_path.join(MOVE_DIR_NAME);
|
||||
if working_directory_path.exists() {
|
||||
return Err(format!(
|
||||
"{:?} already exists, exiting",
|
||||
working_directory_path
|
||||
));
|
||||
}
|
||||
|
||||
fs::create_dir(&working_directory_path)
|
||||
.map_err(|e| format!("Failed to create {:?}: {:?}", working_directory_path, e))?;
|
||||
|
||||
// Moving validators between the same VC is unlikely to be useful and probably indicates a user
|
||||
// error.
|
||||
if src_vc_url == dest_vc_url {
|
||||
return Err(format!(
|
||||
"--{} and --{} must be different",
|
||||
SRC_VALIDATOR_CLIENT_URL_FLAG, DEST_VALIDATOR_CLIENT_URL_FLAG
|
||||
));
|
||||
}
|
||||
|
||||
let (src_http_client, src_keystores) =
|
||||
vc_http_client(src_vc_url.clone(), &src_vc_token_path).await?;
|
||||
let (dest_http_client, dest_keystores) =
|
||||
vc_http_client(dest_vc_url.clone(), &dest_vc_token_path).await?;
|
||||
|
||||
let pubkeys_to_move = match validators {
|
||||
Validators::All => src_keystores.iter().map(|v| v.validating_pubkey).collect(),
|
||||
Validators::Some(request_pubkeys) => {
|
||||
let request_pubkeys_set: HashSet<_> = request_pubkeys.iter().collect();
|
||||
let src_pubkeys_set: HashSet<_> =
|
||||
src_keystores.iter().map(|v| &v.validating_pubkey).collect();
|
||||
let difference = request_pubkeys_set
|
||||
.difference(&src_pubkeys_set)
|
||||
.collect::<Vec<_>>();
|
||||
if !difference.is_empty() {
|
||||
for pk in &difference {
|
||||
eprintln!("{:?} is not present on {:?}", pk, src_vc_url);
|
||||
}
|
||||
return Err(format!(
|
||||
"{} validators not found on {:?}",
|
||||
difference.len(),
|
||||
src_vc_url
|
||||
));
|
||||
}
|
||||
request_pubkeys
|
||||
}
|
||||
};
|
||||
|
||||
// It doesn't make sense to move no validators.
|
||||
if pubkeys_to_move.is_empty() {
|
||||
return Err(format!("--{} cannot be an empty list", VALIDATORS_FLAG));
|
||||
}
|
||||
|
||||
for pubkey_to_move in pubkeys_to_move {
|
||||
let request = DeleteKeystoresRequest {
|
||||
pubkeys: vec![pubkey_to_move],
|
||||
};
|
||||
let deleted_keystore = match src_http_client.delete_keystores(&request).await {
|
||||
Ok(deleted) => deleted,
|
||||
Err(e) => {
|
||||
match src_http_client.get_keystores().await {
|
||||
Ok(response) => {
|
||||
if response
|
||||
.data
|
||||
.iter()
|
||||
.find(|v| v.validating_pubkey == pubkey_to_move)
|
||||
.is_some()
|
||||
{
|
||||
eprintln!(
|
||||
"There was an error removing a validator, however the validator \
|
||||
is still present on the source validator client. The recommended \
|
||||
solution is to run this command again."
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
eprintln!(
|
||||
"There was an error removing a validator and it's unclear if \
|
||||
the validator was removed or not. Manual user intervention is \
|
||||
required."
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
return Err(format!("Deleting {:?} failed with {:?}", pubkey_to_move, e));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/*
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::validators::create_validators::tests::TestBuilder as CreateTestBuilder;
|
||||
use std::fs;
|
||||
use tempfile::{tempdir, TempDir};
|
||||
use validator_client::http_api::test_utils::ApiTester;
|
||||
|
||||
const VC_TOKEN_FILE_NAME: &str = "vc_token.json";
|
||||
|
||||
struct TestBuilder {
|
||||
import_config: MoveConfig,
|
||||
vc: ApiTester,
|
||||
/// Holds the temp directory owned by the `CreateTestBuilder` so it doesn't get cleaned-up
|
||||
/// before we can read it.
|
||||
create_dir: Option<TempDir>,
|
||||
_dir: TempDir,
|
||||
}
|
||||
|
||||
impl TestBuilder {
|
||||
async fn new() -> Self {
|
||||
let dir = tempdir().unwrap();
|
||||
let vc = ApiTester::new().await;
|
||||
let vc_token_path = dir.path().join(VC_TOKEN_FILE_NAME);
|
||||
fs::write(&vc_token_path, &vc.api_token).unwrap();
|
||||
|
||||
Self {
|
||||
import_config: MoveConfig {
|
||||
// This field will be overwritten later on.
|
||||
validators_file_path: dir.path().into(),
|
||||
vc_url: vc.url.clone(),
|
||||
vc_token_path,
|
||||
ignore_duplicates: false,
|
||||
},
|
||||
vc,
|
||||
create_dir: None,
|
||||
_dir: dir,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mutate_import_config<F: Fn(&mut MoveConfig)>(mut self, func: F) -> Self {
|
||||
func(&mut self.import_config);
|
||||
self
|
||||
}
|
||||
|
||||
async fn create_validators(mut self, count: u32, first_index: u32) -> Self {
|
||||
let create_result = CreateTestBuilder::default()
|
||||
.mutate_config(|config| {
|
||||
config.count = count;
|
||||
config.first_index = first_index;
|
||||
})
|
||||
.run_test()
|
||||
.await;
|
||||
assert!(
|
||||
create_result.result.is_ok(),
|
||||
"precondition: validators are created"
|
||||
);
|
||||
self.import_config.validators_file_path = create_result.validators_file_path();
|
||||
self.create_dir = Some(create_result.output_dir);
|
||||
self
|
||||
}
|
||||
|
||||
/// Imports validators without running the entire test suite in `Self::run_test`. This is
|
||||
/// useful for simulating duplicate imports.
|
||||
async fn import_validators_without_checks(self) -> Self {
|
||||
run(self.import_config.clone()).await.unwrap();
|
||||
self
|
||||
}
|
||||
|
||||
async fn run_test(self) -> TestResult {
|
||||
let result = run(self.import_config.clone()).await;
|
||||
|
||||
if result.is_ok() {
|
||||
let local_validators: Vec<ValidatorSpecification> = {
|
||||
let contents =
|
||||
fs::read_to_string(&self.import_config.validators_file_path).unwrap();
|
||||
serde_json::from_str(&contents).unwrap()
|
||||
};
|
||||
let list_keystores_response = self.vc.client.get_keystores().await.unwrap().data;
|
||||
|
||||
assert_eq!(
|
||||
local_validators.len(),
|
||||
list_keystores_response.len(),
|
||||
"vc should have exactly the number of validators imported"
|
||||
);
|
||||
|
||||
for local_validator in &local_validators {
|
||||
let local_keystore = &local_validator.voting_keystore.0;
|
||||
let local_pubkey = local_keystore.public_key().unwrap().into();
|
||||
let remote_validator = list_keystores_response
|
||||
.iter()
|
||||
.find(|validator| validator.validating_pubkey == local_pubkey)
|
||||
.expect("validator must exist on VC");
|
||||
assert_eq!(&remote_validator.derivation_path, &local_keystore.path());
|
||||
// It's not immediately clear why Lighthouse returns `None` rather than
|
||||
// `Some(false)` here, I would expect the latter to be the most accurate.
|
||||
// However, it doesn't seem like a big deal.
|
||||
//
|
||||
// See: https://github.com/sigp/lighthouse/pull/3490
|
||||
//
|
||||
// If that PR changes we'll need to change this line.
|
||||
assert_eq!(remote_validator.readonly, None);
|
||||
}
|
||||
}
|
||||
|
||||
TestResult { result }
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use] // Use the `assert_ok` or `assert_err` fns to "use" this value.
|
||||
struct TestResult {
|
||||
result: Result<(), String>,
|
||||
}
|
||||
|
||||
impl TestResult {
|
||||
fn assert_ok(self) {
|
||||
assert_eq!(self.result, Ok(()))
|
||||
}
|
||||
|
||||
fn assert_err_is(self, msg: String) {
|
||||
assert_eq!(self.result, Err(msg))
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn create_one_validator() {
|
||||
TestBuilder::new()
|
||||
.await
|
||||
.create_validators(1, 0)
|
||||
.await
|
||||
.run_test()
|
||||
.await
|
||||
.assert_ok();
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn create_three_validators() {
|
||||
TestBuilder::new()
|
||||
.await
|
||||
.create_validators(3, 0)
|
||||
.await
|
||||
.run_test()
|
||||
.await
|
||||
.assert_ok();
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn create_one_validator_with_offset() {
|
||||
TestBuilder::new()
|
||||
.await
|
||||
.create_validators(1, 42)
|
||||
.await
|
||||
.run_test()
|
||||
.await
|
||||
.assert_ok();
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn create_three_validators_with_offset() {
|
||||
TestBuilder::new()
|
||||
.await
|
||||
.create_validators(3, 1337)
|
||||
.await
|
||||
.run_test()
|
||||
.await
|
||||
.assert_ok();
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn import_duplicates_when_disallowed() {
|
||||
TestBuilder::new()
|
||||
.await
|
||||
.create_validators(1, 0)
|
||||
.await
|
||||
.import_validators_without_checks()
|
||||
.await
|
||||
.run_test()
|
||||
.await
|
||||
.assert_err_is(DETECTED_DUPLICATE_MESSAGE.to_string());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn import_duplicates_when_allowed() {
|
||||
TestBuilder::new()
|
||||
.await
|
||||
.mutate_import_config(|config| {
|
||||
config.ignore_duplicates = true;
|
||||
})
|
||||
.create_validators(1, 0)
|
||||
.await
|
||||
.import_validators_without_checks()
|
||||
.await
|
||||
.run_test()
|
||||
.await
|
||||
.assert_ok();
|
||||
}
|
||||
}
|
||||
*/
|
||||
Reference in New Issue
Block a user