diff --git a/beacon_node/beacon_chain/src/eth1_chain.rs b/beacon_node/beacon_chain/src/eth1_chain.rs index 1287ef16b0..eadd846de9 100644 --- a/beacon_node/beacon_chain/src/eth1_chain.rs +++ b/beacon_node/beacon_chain/src/eth1_chain.rs @@ -813,7 +813,7 @@ mod test { .write() .cache .insert_log(log.clone()) - .expect("should insert log") + .expect("should insert log"); }) .collect(); diff --git a/beacon_node/eth1/src/deposit_cache.rs b/beacon_node/eth1/src/deposit_cache.rs index b1bb0c9f5f..cd16bfbf94 100644 --- a/beacon_node/eth1/src/deposit_cache.rs +++ b/beacon_node/eth1/src/deposit_cache.rs @@ -102,6 +102,12 @@ impl Default for DepositCache { } } +#[derive(Debug, PartialEq)] +pub enum DepositCacheInsertOutcome { + Inserted, + Duplicate, +} + impl DepositCache { /// Create new `DepositCache` given block number at which deposit /// contract was deployed. @@ -146,7 +152,7 @@ impl DepositCache { /// /// - If a log with index `log.index - 1` is not already present in `self` (ignored when empty). /// - If a log with `log.index` is already known, but the given `log` is distinct to it. 
- pub fn insert_log(&mut self, log: DepositLog) -> Result<(), Error> { + pub fn insert_log(&mut self, log: DepositLog) -> Result<DepositCacheInsertOutcome, Error> { match log.index.cmp(&(self.logs.len() as u64)) { Ordering::Equal => { let deposit = log.deposit_data.tree_hash_root(); @@ -156,11 +162,11 @@ impl DepositCache { .push_leaf(deposit) .map_err(Error::DepositTreeError)?; self.deposit_roots.push(self.deposit_tree.root()); - Ok(()) + Ok(DepositCacheInsertOutcome::Inserted) } Ordering::Less => { if self.logs[log.index as usize] == log { - Ok(()) + Ok(DepositCacheInsertOutcome::Duplicate) } else { Err(Error::DuplicateDistinctLog(log.index)) } @@ -314,7 +320,7 @@ pub mod tests { for i in 0..16 { let mut log = example_log(); log.index = i; - tree.insert_log(log).expect("should add consecutive logs") + tree.insert_log(log).expect("should add consecutive logs"); } } @@ -325,13 +331,16 @@ pub mod tests { for i in 0..4 { let mut log = example_log(); log.index = i; - tree.insert_log(log).expect("should add consecutive logs") + tree.insert_log(log).expect("should add consecutive logs"); } // Add duplicate, when given is the same as the one known. let mut log = example_log(); log.index = 3; - assert!(tree.insert_log(log).is_ok()); + assert_eq!( + tree.insert_log(log).unwrap(), + DepositCacheInsertOutcome::Duplicate + ); // Add duplicate, when given is different to the one known. let mut log = example_log(); @@ -355,7 +364,7 @@ pub mod tests { log.index = i; log.block_number = i; log.deposit_data.withdrawal_credentials = Hash256::from_low_u64_be(i); - tree.insert_log(log).expect("should add consecutive logs") + tree.insert_log(log).expect("should add consecutive logs"); } // Get 0 deposits, with max deposit count. @@ -432,7 +441,7 @@ pub mod tests { log.index = i; log.block_number = i; log.deposit_data.withdrawal_credentials = Hash256::from_low_u64_be(i); - tree.insert_log(log).expect("should add consecutive logs") + tree.insert_log(log).expect("should add consecutive logs"); } // Range too high. 
diff --git a/beacon_node/eth1/src/service.rs b/beacon_node/eth1/src/service.rs index 1899d44f60..6b23a43a44 100644 --- a/beacon_node/eth1/src/service.rs +++ b/beacon_node/eth1/src/service.rs @@ -1,7 +1,7 @@ use crate::metrics; use crate::{ block_cache::{BlockCache, Error as BlockCacheError, Eth1Block}, - deposit_cache::Error as DepositCacheError, + deposit_cache::{DepositCacheInsertOutcome, Error as DepositCacheError}, http::{ get_block, get_block_number, get_chain_id, get_deposit_logs_in_range, get_network_id, BlockQuery, Eth1Id, @@ -866,12 +866,13 @@ impl Service { .collect::<Result<Vec<_>, _>>()? .into_iter() .map(|deposit_log| { - cache + if let DepositCacheInsertOutcome::Inserted = cache .cache .insert_log(deposit_log) - .map_err(Error::FailedToInsertDeposit)?; - - logs_imported += 1; + .map_err(Error::FailedToInsertDeposit)? + { + logs_imported += 1; + } Ok(()) }) diff --git a/beacon_node/eth1/tests/test.rs b/beacon_node/eth1/tests/test.rs index 4e58f82fa3..ce542d4828 100644 --- a/beacon_node/eth1/tests/test.rs +++ b/beacon_node/eth1/tests/test.rs @@ -521,7 +521,7 @@ mod deposit_tree { .map(|raw| raw.to_deposit_log(spec).expect("should parse deposit log")) .inspect(|log| { tree.insert_log(log.clone()) .expect("should add consecutive logs") + .expect("should add consecutive logs"); }) .collect(); diff --git a/common/lighthouse_version/src/lib.rs b/common/lighthouse_version/src/lib.rs index e321152e49..094e8e444d 100644 --- a/common/lighthouse_version/src/lib.rs +++ b/common/lighthouse_version/src/lib.rs @@ -9,7 +9,7 @@ use target_info::Target; /// /// `Lighthouse/v0.2.0-1419501f2+` pub const VERSION: &str = git_version!( - args = ["--always", "--dirty=+"], + args = ["--always", "--dirty=+", "--abbrev=7"], prefix = "Lighthouse/v1.0.3-", fallback = "unknown" ); diff --git a/validator_client/src/cli.rs b/validator_client/src/cli.rs index 5cc5ab60a0..99bf643f21 100644 --- a/validator_client/src/cli.rs +++ b/validator_client/src/cli.rs @@ -37,7 +37,6 @@ pub fn cli_app<'a, 
'b>() -> App<'a, 'b> { ) .takes_value(true) .conflicts_with("datadir") - .requires("secrets-dir") ) .arg( Arg::with_name("secrets-dir") @@ -51,7 +50,6 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> { ) .takes_value(true) .conflicts_with("datadir") - .requires("validators-dir"), ) .arg( Arg::with_name("delete-lockfiles") diff --git a/validator_client/src/config.rs b/validator_client/src/config.rs index a222258ed9..c43a029291 100644 --- a/validator_client/src/config.rs +++ b/validator_client/src/config.rs @@ -82,10 +82,10 @@ impl Config { validator_dir = Some(base_dir.join(DEFAULT_VALIDATOR_DIR)); secrets_dir = Some(base_dir.join(DEFAULT_SECRET_DIR)); } - if cli_args.value_of("validators-dir").is_some() - && cli_args.value_of("secrets-dir").is_some() - { + if cli_args.value_of("validators-dir").is_some() { validator_dir = Some(parse_required(cli_args, "validators-dir")?); + } + if cli_args.value_of("secrets-dir").is_some() { secrets_dir = Some(parse_required(cli_args, "secrets-dir")?); }