diff --git a/.github/workflows/local-testnet.yml b/.github/workflows/local-testnet.yml index 455931aa1e..7bd8b40d76 100644 --- a/.github/workflows/local-testnet.yml +++ b/.github/workflows/local-testnet.yml @@ -67,6 +67,7 @@ jobs: working-directory: scripts/local_testnet - name: Upload logs artifact + if: always() uses: actions/upload-artifact@v4 with: name: logs-local-testnet @@ -125,6 +126,7 @@ jobs: working-directory: scripts/tests - name: Upload logs artifact + if: always() uses: actions/upload-artifact@v4 with: name: logs-doppelganger-protection-success @@ -160,6 +162,7 @@ jobs: working-directory: scripts/tests - name: Upload logs artifact + if: always() uses: actions/upload-artifact@v4 with: name: logs-doppelganger-protection-failure @@ -167,6 +170,48 @@ jobs: scripts/local_testnet/logs retention-days: 3 + # Tests checkpoint syncing to a live network (current fork) and a running devnet (usually next scheduled fork) + checkpoint-sync-test: + name: checkpoint-sync-test-${{ matrix.network }} + runs-on: ubuntu-latest + needs: dockerfile-ubuntu + if: contains(github.event.pull_request.labels.*.name, 'syncing') + continue-on-error: true + strategy: + matrix: + network: [sepolia, devnet] + steps: + - uses: actions/checkout@v4 + + - name: Install Kurtosis + run: | + echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list + sudo apt update + sudo apt install -y kurtosis-cli + kurtosis analytics disable + + - name: Download Docker image artifact + uses: actions/download-artifact@v4 + with: + name: lighthouse-docker + path: . 
+ + - name: Load Docker image + run: docker load -i lighthouse-docker.tar + + - name: Run the checkpoint sync test script + run: | + ./checkpoint-sync.sh "sync-${{ matrix.network }}" "checkpoint-sync-config-${{ matrix.network }}.yaml" + working-directory: scripts/tests + + - name: Upload logs artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-checkpoint-sync-${{ matrix.network }} + path: | + scripts/local_testnet/logs + retention-days: 3 # This job succeeds ONLY IF all others succeed. It is used by the merge queue to determine whether # a PR is safe to merge. New jobs should be added here. @@ -182,4 +227,6 @@ jobs: steps: - uses: actions/checkout@v4 - name: Check that success job is dependent on all others - run: ./scripts/ci/check-success-job.sh ./.github/workflows/local-testnet.yml local-testnet-success + run: | + exclude_jobs='checkpoint-sync-test' + ./scripts/ci/check-success-job.sh ./.github/workflows/local-testnet.yml local-testnet-success "$exclude_jobs" diff --git a/Cargo.lock b/Cargo.lock index 48a39cf304..70c910aadc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2576,6 +2576,8 @@ dependencies = [ "bls", "compare_fields", "compare_fields_derive", + "context_deserialize", + "context_deserialize_derive", "derivative", "eth2_network_config", "ethereum_ssz", diff --git a/Makefile b/Makefile index fe5dfbe551..75b6811b74 100644 --- a/Makefile +++ b/Makefile @@ -218,6 +218,9 @@ run-state-transition-tests: # Downloads and runs the EF test vectors. test-ef: make-ef-tests run-ef-tests +# Downloads and runs the nightly EF test vectors. +test-ef-nightly: make-ef-tests-nightly run-ef-tests + # Downloads and runs the EF test vectors with nextest. nextest-ef: make-ef-tests nextest-run-ef-tests @@ -278,6 +281,10 @@ lint-full: make-ef-tests: make -C $(EF_TESTS) +# Download/extract the nightly EF test vectors. 
+make-ef-tests-nightly: + CONSENSUS_SPECS_TEST_VERSION=nightly make -C $(EF_TESTS) + # Verifies that crates compile with fuzzing features enabled arbitrary-fuzz: cargo check -p state_processing --features arbitrary-fuzz,$(TEST_FEATURES) diff --git a/beacon_node/network/src/sync/block_lookups/mod.rs b/beacon_node/network/src/sync/block_lookups/mod.rs index 8c884f644e..96b088747b 100644 --- a/beacon_node/network/src/sync/block_lookups/mod.rs +++ b/beacon_node/network/src/sync/block_lookups/mod.rs @@ -106,7 +106,7 @@ pub type SingleLookupId = u32; enum Action { Retry, ParentUnknown { parent_root: Hash256 }, - Drop, + Drop(/* reason: */ String), Continue, } @@ -194,19 +194,22 @@ impl BlockLookups { /// Creates a parent lookup for the block with the given `block_root` and immediately triggers it. /// If a parent lookup exists or is triggered, a current lookup will be created. + /// + /// Returns true if the lookup is created or already exists #[instrument(parent = None, level = "info", fields(service = "lookup_sync"), name = "lookup_sync", skip_all )] + #[must_use = "only reference the new lookup if returns true"] pub fn search_child_and_parent( &mut self, block_root: Hash256, block_component: BlockComponent, peer_id: PeerId, cx: &mut SyncNetworkContext, - ) { + ) -> bool { let parent_root = block_component.parent_root(); let parent_lookup_exists = @@ -223,11 +226,14 @@ impl BlockLookups { // the lookup with zero peers to house the block components. &[], cx, - ); + ) + } else { + false } } /// Seach a block whose parent root is unknown. 
+ /// /// Returns true if the lookup is created or already exists #[instrument(parent = None, level = "info", @@ -235,13 +241,14 @@ impl BlockLookups { name = "lookup_sync", skip_all )] + #[must_use = "only reference the new lookup if returns true"] pub fn search_unknown_block( &mut self, block_root: Hash256, peer_source: &[PeerId], cx: &mut SyncNetworkContext, - ) { - self.new_current_lookup(block_root, None, None, peer_source, cx); + ) -> bool { + self.new_current_lookup(block_root, None, None, peer_source, cx) } /// A block or blob triggers the search of a parent. @@ -256,6 +263,7 @@ impl BlockLookups { name = "lookup_sync", skip_all )] + #[must_use = "only reference the new lookup if returns true"] pub fn search_parent_of_child( &mut self, block_root_to_search: Hash256, @@ -363,6 +371,7 @@ impl BlockLookups { name = "lookup_sync", skip_all )] + #[must_use = "only reference the new lookup if returns true"] fn new_current_lookup( &mut self, block_root: Hash256, @@ -656,7 +665,7 @@ impl BlockLookups { // This is unreachable because RPC blocks do not undergo gossip verification, and // this error can *only* come from gossip verification. error!(?block_root, "Single block lookup hit unreachable condition"); - Action::Drop + Action::Drop("DuplicateImportStatusUnknown".to_owned()) } BlockProcessingResult::Ignored => { // Beacon processor signalled to ignore the block processing result. @@ -665,14 +674,14 @@ impl BlockLookups { component = ?R::response_type(), "Lookup component processing ignored, cpu might be overloaded" ); - Action::Drop + Action::Drop("Block processing ignored".to_owned()) } BlockProcessingResult::Err(e) => { match e { BlockError::BeaconChainError(e) => { // Internal error error!(%block_root, error = ?e, "Beacon chain error processing lookup component"); - Action::Drop + Action::Drop(format!("{e:?}")) } BlockError::ParentUnknown { parent_root, .. 
} => { // Reverts the status of this request to `AwaitingProcessing` holding the @@ -691,7 +700,7 @@ impl BlockLookups { error = ?e, "Single block lookup failed. Execution layer is offline / unsynced / misconfigured" ); - Action::Drop + Action::Drop(format!("{e:?}")) } BlockError::AvailabilityCheck(e) if e.category() == AvailabilityCheckErrorCategory::Internal => @@ -703,7 +712,7 @@ // lookup state transition. This error invalidates both blob and block requests, and we don't know the // stateĀ of both requests. Blobs may have already successfullly processed for example. // We opt to drop the lookup instead. - Action::Drop + Action::Drop(format!("{e:?}")) } other => { debug!( @@ -757,19 +766,32 @@ } Action::ParentUnknown { parent_root } => { let peers = lookup.all_peers(); + // Mark lookup as awaiting **before** creating the parent lookup. At this point the + // lookup may be inconsistent. lookup.set_awaiting_parent(parent_root); - debug!( - id = lookup.id, - ?block_root, - ?parent_root, - "Marking lookup as awaiting parent" - ); - self.search_parent_of_child(parent_root, block_root, &peers, cx); - Ok(LookupResult::Pending) + let parent_lookup_exists = + self.search_parent_of_child(parent_root, block_root, &peers, cx); + if parent_lookup_exists { + // The parent lookup exists or has been created. It's safe for `lookup` to + // reference the parent as awaiting. + debug!( + id = lookup_id, + ?block_root, + ?parent_root, + "Marking lookup as awaiting parent" + ); + Ok(LookupResult::Pending) + } else { + // The parent lookup is faulty and was not created, we must drop the `lookup` as + // it's in an inconsistent state. We must drop all of its children too. 
+ Err(LookupRequestError::Failed(format!( + "Parent lookup is faulty {parent_root:?}" + ))) + } } - Action::Drop => { + Action::Drop(reason) => { // Drop with noop - Err(LookupRequestError::Failed) + Err(LookupRequestError::Failed(reason)) } Action::Continue => { // Drop this completed lookup only diff --git a/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs b/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs index 3789dbe91e..30947cf1f0 100644 --- a/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs +++ b/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs @@ -40,7 +40,7 @@ pub enum LookupRequestError { /// Inconsistent lookup request state BadState(String), /// Lookup failed for some other reason and should be dropped - Failed, + Failed(/* reason: */ String), /// Received MissingComponents when all components have been processed. This should never /// happen, and indicates some internal bug MissingComponentsAfterAllProcessed, diff --git a/beacon_node/network/src/sync/manager.rs b/beacon_node/network/src/sync/manager.rs index 3c94793941..13145401e4 100644 --- a/beacon_node/network/src/sync/manager.rs +++ b/beacon_node/network/src/sync/manager.rs @@ -931,12 +931,20 @@ impl SyncManager { ) { match self.should_search_for_block(Some(slot), &peer_id) { Ok(_) => { - self.block_lookups.search_child_and_parent( + if self.block_lookups.search_child_and_parent( block_root, block_component, peer_id, &mut self.network, - ); + ) { + // Lookup created. 
No need to log here it's logged in `new_current_lookup` + } else { + debug!( + ?block_root, + ?parent_root, + "No lookup created for child and parent" + ); + } } Err(reason) => { debug!(%block_root, %parent_root, reason, "Ignoring unknown parent request"); @@ -947,8 +955,15 @@ impl SyncManager { fn handle_unknown_block_root(&mut self, peer_id: PeerId, block_root: Hash256) { match self.should_search_for_block(None, &peer_id) { Ok(_) => { - self.block_lookups - .search_unknown_block(block_root, &[peer_id], &mut self.network); + if self.block_lookups.search_unknown_block( + block_root, + &[peer_id], + &mut self.network, + ) { + // Lookup created. No need to log here it's logged in `new_current_lookup` + } else { + debug!(?block_root, "No lookup created for unknown block"); + } } Err(reason) => { debug!(%block_root, reason, "Ignoring unknown block request"); diff --git a/beacon_node/network/src/sync/tests/lookups.rs b/beacon_node/network/src/sync/tests/lookups.rs index 84ff1c7e25..e7e6ff5970 100644 --- a/beacon_node/network/src/sync/tests/lookups.rs +++ b/beacon_node/network/src/sync/tests/lookups.rs @@ -1725,6 +1725,63 @@ fn test_parent_lookup_too_deep_grow_ancestor() { rig.assert_failed_chain(chain_hash); } +// Regression test for https://github.com/sigp/lighthouse/pull/7118 +#[test] +fn test_child_lookup_not_created_for_failed_chain_parent_after_processing() { + // GIVEN: A parent chain longer than PARENT_DEPTH_TOLERANCE. + let mut rig = TestRig::test_setup(); + let mut blocks = rig.rand_blockchain(PARENT_DEPTH_TOLERANCE + 1); + let peer_id = rig.new_connected_peer(); + + // The child of the trigger block to be used to extend the chain. + let trigger_block_child = blocks.pop().unwrap(); + // The trigger block that starts the lookup. + let trigger_block = blocks.pop().unwrap(); + let tip_root = trigger_block.canonical_root(); + + // Trigger the initial unknown parent block for the tip. 
+ rig.trigger_unknown_parent_block(peer_id, trigger_block.clone()); + + // Simulate the lookup chain building up via `ParentUnknown` errors. + for block in blocks.into_iter().rev() { + let id = rig.expect_block_parent_request(block.canonical_root()); + rig.parent_lookup_block_response(id, peer_id, Some(block.clone())); + rig.parent_lookup_block_response(id, peer_id, None); + rig.expect_block_process(ResponseType::Block); + rig.parent_block_processed( + tip_root, + BlockProcessingResult::Err(BlockError::ParentUnknown { + parent_root: block.parent_root(), + }), + ); + } + + // At this point, the chain should have been deemed too deep and pruned. + // The tip root should have been inserted into failed chains. + rig.assert_failed_chain(tip_root); + rig.expect_no_penalty_for(peer_id); + + // WHEN: Trigger the extending block that points to the tip. + let trigger_block_child_root = trigger_block_child.canonical_root(); + rig.trigger_unknown_block_from_attestation(trigger_block_child_root, peer_id); + let id = rig.expect_block_lookup_request(trigger_block_child_root); + rig.single_lookup_block_response(id, peer_id, Some(trigger_block_child.clone())); + rig.single_lookup_block_response(id, peer_id, None); + rig.expect_block_process(ResponseType::Block); + rig.single_block_component_processed( + id.lookup_id, + BlockProcessingResult::Err(BlockError::ParentUnknown { + parent_root: tip_root, + }), + ); + + // THEN: The extending block should not create a lookup because the tip was inserted into failed chains. + rig.expect_no_active_lookups(); + // AND: The peer should be penalized for extending a failed chain. 
+ rig.expect_single_penalty(peer_id, "failed_chain"); + rig.expect_empty_network(); +} + #[test] fn test_parent_lookup_too_deep_grow_tip() { let mut rig = TestRig::test_setup(); diff --git a/consensus/types/presets/minimal/electra.yaml b/consensus/types/presets/minimal/electra.yaml index f99effe0f1..22e26e4025 100644 --- a/consensus/types/presets/minimal/electra.yaml +++ b/consensus/types/presets/minimal/electra.yaml @@ -32,10 +32,10 @@ MAX_ATTESTATIONS_ELECTRA: 8 # Execution # --------------------------------------------------------------- -# [customized] 2**2 (= 4) deposit requests -MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: 4 -# [customized] 2**1 (= 2) withdrawal requests -MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: 2 +# 2**13 (= 8,192) deposit requests +MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: 8192 +# 2**4 (= 16) withdrawal requests +MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: 16 # 2**1 (= 2) consolidation requests MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: 2 diff --git a/consensus/types/src/data_column_sidecar.rs b/consensus/types/src/data_column_sidecar.rs index 5ec2b28b2b..612ddb6eb8 100644 --- a/consensus/types/src/data_column_sidecar.rs +++ b/consensus/types/src/data_column_sidecar.rs @@ -6,12 +6,14 @@ use crate::{ SignedBeaconBlockHeader, Slot, }; use bls::Signature; +use context_deserialize::ContextDeserialize; use derivative::Derivative; use kzg::Error as KzgError; use kzg::{KzgCommitment, KzgProof}; use merkle_proof::verify_merkle_proof; use safe_arith::ArithError; -use serde::{Deserialize, Serialize}; +use serde::de::Error; +use serde::{Deserialize, Deserializer, Serialize}; use ssz::{DecodeError, Encode}; use ssz_derive::{Decode, Encode}; use ssz_types::Error as SszError; @@ -26,12 +28,49 @@ pub type Cell = FixedVector::BytesPerCell>; pub type DataColumn = VariableList, ::MaxBlobCommitmentsPerBlock>; /// Identifies a set of data columns associated with a specific beacon block. 
-#[derive(Encode, Clone, Debug, PartialEq)] +#[derive(Encode, Clone, Debug, PartialEq, TreeHash)] pub struct DataColumnsByRootIdentifier { pub block_root: Hash256, pub columns: RuntimeVariableList, } +impl<'de> ContextDeserialize<'de, (ForkName, usize)> for DataColumnsByRootIdentifier { + fn context_deserialize(deserializer: D, context: (ForkName, usize)) -> Result + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + struct Helper { + block_root: Hash256, + columns: serde_json::Value, + } + + let helper = Helper::deserialize(deserializer)?; + Ok(Self { + block_root: helper.block_root, + columns: RuntimeVariableList::context_deserialize(helper.columns, context) + .map_err(Error::custom)?, + }) + } +} + +impl DataColumnsByRootIdentifier { + pub fn from_ssz_bytes(bytes: &[u8], num_columns: usize) -> Result { + let mut builder = ssz::SszDecoderBuilder::new(bytes); + builder.register_type::()?; + builder.register_anonymous_variable_length_item()?; + + let mut decoder = builder.build()?; + let block_root = decoder.decode_next()?; + let columns = decoder + .decode_next_with(|bytes| RuntimeVariableList::from_ssz_bytes(bytes, num_columns))?; + Ok(DataColumnsByRootIdentifier { + block_root, + columns, + }) + } +} + impl RuntimeVariableList { pub fn from_ssz_bytes_with_nested( bytes: &[u8], @@ -47,21 +86,7 @@ impl RuntimeVariableList { Some(max_len), )? 
.into_iter() - .map(|bytes| { - let mut builder = ssz::SszDecoderBuilder::new(&bytes); - builder.register_type::()?; - builder.register_anonymous_variable_length_item()?; - - let mut decoder = builder.build()?; - let block_root = decoder.decode_next()?; - let columns = decoder.decode_next_with(|bytes| { - RuntimeVariableList::from_ssz_bytes(bytes, num_columns) - })?; - Ok(DataColumnsByRootIdentifier { - block_root, - columns, - }) - }) + .map(|bytes| DataColumnsByRootIdentifier::from_ssz_bytes(&bytes, num_columns)) .collect::, _>>()?; Ok(RuntimeVariableList::from_vec(vec, max_len)) diff --git a/consensus/types/src/eth_spec.rs b/consensus/types/src/eth_spec.rs index 6f1b3e6ce6..1cde9c2e48 100644 --- a/consensus/types/src/eth_spec.rs +++ b/consensus/types/src/eth_spec.rs @@ -476,8 +476,6 @@ impl EthSpec for MinimalEthSpec { type KzgCommitmentInclusionProofDepth = U10; type PendingPartialWithdrawalsLimit = U64; type PendingConsolidationsLimit = U64; - type MaxDepositRequestsPerPayload = U4; - type MaxWithdrawalRequestsPerPayload = U2; type FieldElementsPerCell = U64; type FieldElementsPerExtBlob = U8192; type MaxCellsPerBlock = U33554432; @@ -509,7 +507,9 @@ impl EthSpec for MinimalEthSpec { MaxPendingDepositsPerEpoch, MaxConsolidationRequestsPerPayload, MaxAttesterSlashingsElectra, - MaxAttestationsElectra + MaxAttestationsElectra, + MaxDepositRequestsPerPayload, + MaxWithdrawalRequestsPerPayload }); fn default_spec() -> ChainSpec { diff --git a/consensus/types/src/runtime_var_list.rs b/consensus/types/src/runtime_var_list.rs index 454c8b9e18..2a8899e203 100644 --- a/consensus/types/src/runtime_var_list.rs +++ b/consensus/types/src/runtime_var_list.rs @@ -6,6 +6,7 @@ use ssz::Decode; use ssz_types::Error; use std::ops::{Deref, Index, IndexMut}; use std::slice::SliceIndex; +use tree_hash::{Hash256, MerkleHasher, PackedEncoding, TreeHash, TreeHashType}; /// Emulates a SSZ `List`. 
/// @@ -241,6 +242,62 @@ where } } +impl TreeHash for RuntimeVariableList { + fn tree_hash_type() -> tree_hash::TreeHashType { + tree_hash::TreeHashType::List + } + + fn tree_hash_packed_encoding(&self) -> PackedEncoding { + unreachable!("List should never be packed.") + } + + fn tree_hash_packing_factor() -> usize { + unreachable!("List should never be packed.") + } + + fn tree_hash_root(&self) -> Hash256 { + let root = runtime_vec_tree_hash_root::(&self.vec, self.max_len); + + tree_hash::mix_in_length(&root, self.len()) + } +} + +// We can delete this once the upstream `vec_tree_hash_root` is modified to use a runtime max len. +pub fn runtime_vec_tree_hash_root(vec: &[T], max_len: usize) -> Hash256 +where + T: TreeHash, +{ + match T::tree_hash_type() { + TreeHashType::Basic => { + let mut hasher = + MerkleHasher::with_leaves(max_len.div_ceil(T::tree_hash_packing_factor())); + + for item in vec { + hasher + .write(&item.tree_hash_packed_encoding()) + .expect("ssz_types variable vec should not contain more elements than max"); + } + + hasher + .finish() + .expect("ssz_types variable vec should not have a remaining buffer") + } + TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => { + let mut hasher = MerkleHasher::with_leaves(max_len); + + for item in vec { + hasher + .write(item.tree_hash_root().as_slice()) + .expect("ssz_types vec should not contain more elements than max"); + } + + hasher + .finish() + .expect("ssz_types vec should not have a remaining buffer") + } + } +} + #[cfg(test)] mod test { use super::*; diff --git a/scripts/ci/check-success-job.sh b/scripts/ci/check-success-job.sh index dfa5c03257..2eee35f69e 100755 --- a/scripts/ci/check-success-job.sh +++ b/scripts/ci/check-success-job.sh @@ -5,8 +5,13 @@ set -euf -o pipefail YAML=$1 SUCCESS_JOB=$2 +EXCLUDE_JOBS_REGEX=${3:-} + +yq '... comments="" | .jobs | map(. 
| key) | .[]' < "$YAML" | + grep -v "$SUCCESS_JOB" | + { [ -n "$EXCLUDE_JOBS_REGEX" ] && grep -Ev "$EXCLUDE_JOBS_REGEX" || cat; } | + sort > all_jobs.txt -yq '... comments="" | .jobs | map(. | key) | .[]' < "$YAML" | grep -v "$SUCCESS_JOB" | sort > all_jobs.txt yq "... comments=\"\" | .jobs.$SUCCESS_JOB.needs[]" < "$YAML" | grep -v "$SUCCESS_JOB" | sort > dep_jobs.txt diff all_jobs.txt dep_jobs.txt || (echo "COMPLETENESS CHECK FAILED" && exit 1) rm all_jobs.txt dep_jobs.txt diff --git a/scripts/tests/checkpoint-sync-config-devnet.yaml b/scripts/tests/checkpoint-sync-config-devnet.yaml new file mode 100644 index 0000000000..e81e5d4401 --- /dev/null +++ b/scripts/tests/checkpoint-sync-config-devnet.yaml @@ -0,0 +1,16 @@ +# Kurtosis config file to checkpoint sync to a running devnet supported by ethPandaOps and `ethereum-package`. +participants: + - cl_type: lighthouse + cl_image: lighthouse:local + supernode: true + - cl_type: lighthouse + cl_image: lighthouse:local + supernode: false + +checkpoint_sync_enabled: true +checkpoint_sync_url: "https://checkpoint-sync.fusaka-devnet-0.ethpandaops.io" + +global_log_level: debug + +network_params: + network: fusaka-devnet-0 diff --git a/scripts/tests/checkpoint-sync-config-sepolia.yaml b/scripts/tests/checkpoint-sync-config-sepolia.yaml new file mode 100644 index 0000000000..2adf9c22b8 --- /dev/null +++ b/scripts/tests/checkpoint-sync-config-sepolia.yaml @@ -0,0 +1,16 @@ +# Kurtosis config file to checkpoint sync to a live network (Sepolia). 
+participants: + - cl_type: lighthouse + cl_image: lighthouse:local + supernode: true + - cl_type: lighthouse + cl_image: lighthouse:local + supernode: false + +checkpoint_sync_enabled: true +checkpoint_sync_url: "https://checkpoint-sync.sepolia.ethpandaops.io" + +global_log_level: debug + +network_params: + network: sepolia diff --git a/scripts/tests/checkpoint-sync.sh b/scripts/tests/checkpoint-sync.sh new file mode 100755 index 0000000000..a170d1e94d --- /dev/null +++ b/scripts/tests/checkpoint-sync.sh @@ -0,0 +1,127 @@ +#!/usr/bin/env bash +# +# Checkpoint sync to a live network. +# +# Start with checkpoint sync and let the node(s) sync to head and perform backfill for a specified number of slots. +# This test ensures we cover all sync components (range, lookup, backfill) and measures sync speed +# to detect any performance regressions. +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" + +ENCLAVE_NAME=${1:-sync-testnet} +CONFIG=${2:-$SCRIPT_DIR/checkpoint-sync-config-sepolia.yaml} + +# Test configuration +# ------------------------------------------------------ +# Interval for polling the /lighthouse/syncing endpoint for sync status +POLL_INTERVAL_SECS=5 +# Target number of slots to backfill to complete this test. +TARGET_BACKFILL_SLOTS=1024 +# Timeout for this test, if the node(s) fail to backfill `TARGET_BACKFILL_SLOTS` slots, fail the test. 
+TIMEOUT_MINS=10 +TIMEOUT_SECS=$((TIMEOUT_MINS * 60)) +# ------------------------------------------------------ + +# Polls a single node's sync status +poll_node() { + local node_type=$1 + local url=${node_urls[$node_type]} + + response=$(curl -s "${url}/lighthouse/syncing") + + if [ -z "$response" ] || [ "$response" = "null" ]; then + echo "${node_type} status: No response or null response" + return + fi + + # Print syncing status + sync_state=$(echo "$response" | jq -r 'if (.data | type) == "object" then "object" else "string" end' 2>/dev/null) + + if [ "$sync_state" = "object" ]; then + status=$(echo "$response" | jq -r '.data | keys[0] // "Unknown"') + fields=$(echo "$response" | jq -r ".data.${status} | to_entries | map(\"\(.key): \(.value)\") | join(\", \")") + echo "${node_type} status: ${status}, ${fields}" + else + status=$(echo "$response" | jq -r '.data' 2>/dev/null) + echo "${node_type} status: ${status:-Unknown}" + fi + + # Check for completion criteria + if [ "$status" = "BackFillSyncing" ]; then + completed=$(echo "$response" | jq -r ".data.${status}.completed // 0") + if [ "$completed" -ge "$TARGET_BACKFILL_SLOTS" ]; then + mark_node_complete "$node_type" + fi + fi + # For other states (Synced, SyncingFinalized, SyncingHead, SyncTransition, Stalled, Unknown), + # we continue polling + # NOTE: there is a bug where Lighthouse briefly switches to "Synced" before completing backfilling. We ignore this state + # as it's unlikely a node is fully synced without going through backfilling `TARGET_BACKFILL_SLOTS` slots (only + # possible on a new network). +} + +# Marks a node as complete and records the completion time +mark_node_complete() { + local node_type=$1 + if [ "${node_completed[$node_type]}" = false ]; then + node_completed[$node_type]=true + node_complete_time[$node_type]=$(date +%s) + echo "${node_type} completed backfill in $((node_complete_time[$node_type] - start_time)) seconds" + fi +} + +exit_and_dump_logs() { + local exit_code=$1 + echo "Shutting down..." 
+ $SCRIPT_DIR/../local_testnet/stop_local_testnet.sh $ENCLAVE_NAME + echo "Test completed with exit code $exit_code." + exit $exit_code +} + +# Start the nodes +$SCRIPT_DIR/../local_testnet/start_local_testnet.sh -e $ENCLAVE_NAME -b false -n $CONFIG +if [ $? -ne 0 ]; then + echo "Failed to start local testnet" + exit_and_dump_logs 1 +fi + +start_time=$(date +%s) + +# Get all beacon API URLs +supernode_url=$(kurtosis port print $ENCLAVE_NAME cl-1-lighthouse-geth http) +fullnode_url=$(kurtosis port print $ENCLAVE_NAME cl-2-lighthouse-geth http) + +# Initialize statuses +declare -A node_completed +declare -A node_complete_time +declare -A node_urls + +node_urls["supernode"]="$supernode_url" +node_urls["fullnode"]="$fullnode_url" +node_completed["supernode"]=false +node_completed["fullnode"]=false + +echo "Polling sync status until backfill reaches ${TARGET_BACKFILL_SLOTS} slots or timeout of ${TIMEOUT_MINS} mins" + +while [ "${node_completed[supernode]}" = false ] || [ "${node_completed[fullnode]}" = false ]; do + current_time=$(date +%s) + elapsed=$((current_time - start_time)) + + if [ "$elapsed" -ge "$TIMEOUT_SECS" ]; then + echo "ERROR: Nodes timed out syncing after ${TIMEOUT_MINS} minutes. Exiting." + exit_and_dump_logs 1 + fi + + # Poll each node that hasn't completed yet + for node in "supernode" "fullnode"; do + if [ "${node_completed[$node]}" = false ]; then + poll_node "$node" + fi + done + + sleep $POLL_INTERVAL_SECS +done + +echo "Sync test complete! Both supernode and fullnode have synced to HEAD and backfilled ${TARGET_BACKFILL_SLOTS} slots." 
+echo "Supernode time: $((node_complete_time[supernode] - start_time)) seconds" +echo "Fullnode time: $((node_complete_time[fullnode] - start_time)) seconds" +exit_and_dump_logs 0 \ No newline at end of file diff --git a/testing/ef_tests/Cargo.toml b/testing/ef_tests/Cargo.toml index d93f3a5578..f6d6837f71 100644 --- a/testing/ef_tests/Cargo.toml +++ b/testing/ef_tests/Cargo.toml @@ -7,6 +7,7 @@ edition = { workspace = true } [features] # `ef_tests` feature must be enabled to actually run the tests ef_tests = [] +disable_rayon = [] fake_crypto = ["bls/fake_crypto"] portable = ["beacon_chain/portable"] @@ -16,6 +17,8 @@ beacon_chain = { workspace = true } bls = { workspace = true } compare_fields = { workspace = true } compare_fields_derive = { workspace = true } +context_deserialize = { workspace = true } +context_deserialize_derive = { workspace = true } derivative = { workspace = true } eth2_network_config = { workspace = true } ethereum_ssz = { workspace = true } diff --git a/testing/ef_tests/Makefile b/testing/ef_tests/Makefile index c3a56ec11a..6b780b1f92 100644 --- a/testing/ef_tests/Makefile +++ b/testing/ef_tests/Makefile @@ -1,44 +1,33 @@ -TESTS_TAG := v1.5.0-beta.4 -TESTS = general minimal mainnet -TARBALLS = $(patsubst %,%-$(TESTS_TAG).tar.gz,$(TESTS)) - +# To download/extract nightly tests, run: +# CONSENSUS_SPECS_TEST_VERSION=nightly make +CONSENSUS_SPECS_TEST_VERSION ?= v1.6.0-alpha.0 REPO_NAME := consensus-spec-tests OUTPUT_DIR := ./$(REPO_NAME) -BASE_URL := https://github.com/ethereum/$(REPO_NAME)/releases/download/$(TESTS_TAG) BLS_TEST_REPO_NAME := bls12-381-tests -BLS_TEST_TAG := v0.1.1 +BLS_TEST_VERSION := v0.1.1 BLS_TEST = bls_tests_yaml -BLS_TARBALL = $(patsubst %,%-$(BLS_TEST_TAG).tar.gz,$(BLS_TEST)) BLS_OUTPUT_DIR := $(OUTPUT_DIR)/$(BLS_TEST_REPO_NAME) -BLS_BASE_URL := https://github.com/ethereum/$(BLS_TEST_REPO_NAME)/releases/download/$(BLS_TEST_TAG) +BLS_BASE_URL := 
https://github.com/ethereum/$(BLS_TEST_REPO_NAME)/releases/download/$(BLS_TEST_VERSION) -CURL := $(if $(LIGHTHOUSE_GITHUB_TOKEN),curl -L --header "Authorization: $(LIGHTHOUSE_GITHUB_TOKEN)",curl -L) +.PHONY: all clean -all: - make $(OUTPUT_DIR) - make $(BLS_OUTPUT_DIR) +all: clean $(OUTPUT_DIR) $(BLS_OUTPUT_DIR) -$(OUTPUT_DIR): $(TARBALLS) - mkdir $(OUTPUT_DIR) - for test_tarball in $^; do \ - tar -xzf $$test_tarball -C $(OUTPUT_DIR);\ +clean: + rm -rf *.tar.gz $(OUTPUT_DIR) $(BLS_OUTPUT_DIR) + +$(OUTPUT_DIR): + mkdir -p $(OUTPUT_DIR) + ./download_test_vectors.sh $(CONSENSUS_SPECS_TEST_VERSION) + for test_tarball in *.tar.gz; do \ + tar -xzf $$test_tarball -C $(OUTPUT_DIR); \ + rm -f $$test_tarball; \ done $(BLS_OUTPUT_DIR): - mkdir $(BLS_OUTPUT_DIR) - $(CURL) $(BLS_BASE_URL)/$(BLS_TEST).tar.gz -o $(BLS_TARBALL) - tar -xzf $(BLS_TARBALL) -C $(BLS_OUTPUT_DIR) - -%-$(TESTS_TAG).tar.gz: - $(CURL) $(BASE_URL)/$*.tar.gz -o $@ - -clean-test-files: - rm -rf $(OUTPUT_DIR) $(BLS_OUTPUT_DIR) - -clean-archives: - rm -f $(TARBALLS) $(BLS_TARBALL) - -clean: clean-test-files clean-archives - -.PHONY: clean clean-archives clean-test-files + mkdir -p $(BLS_OUTPUT_DIR) + curl --progress-bar --location --remote-name --show-error --retry 3 --retry-all-errors --fail \ + $(BLS_BASE_URL)/$(BLS_TEST).tar.gz + tar -xzf *.tar.gz -C $(BLS_OUTPUT_DIR) + rm -f *.tar.gz diff --git a/testing/ef_tests/README.md b/testing/ef_tests/README.md index 5ffd453d99..b04cd25dc7 100644 --- a/testing/ef_tests/README.md +++ b/testing/ef_tests/README.md @@ -28,6 +28,16 @@ $ cargo test --features ef_tests The tests won't run without the `ef_tests` feature enabled (this is to ensure that a top-level `cargo test --all` won't fail on missing files). 
+The following is sometimes necessary to avoid stack overflow issues when running on MacOS: +``` +$ export RUST_MIN_STACK=8388608 +``` + +When debugging failing tests, it's often useful to disable parallelization and output suppression: +``` +$ cargo test --features ef_tests,disable_rayon -- --nocapture +``` + ## Saving Space When you download the tests, the downloaded archives will be kept in addition to the extracted diff --git a/testing/ef_tests/check_all_files_accessed.py b/testing/ef_tests/check_all_files_accessed.py index 3aeff8ce06..d7568d854f 100755 --- a/testing/ef_tests/check_all_files_accessed.py +++ b/testing/ef_tests/check_all_files_accessed.py @@ -45,13 +45,13 @@ excluded_paths = [ "bls12-381-tests/deserialization_G1", "bls12-381-tests/deserialization_G2", "bls12-381-tests/hash_to_G2", - "tests/.*/eip6110", - "tests/.*/whisk", - # TODO(das): Fulu tests are ignored for now - "tests/.*/fulu", - "tests/.*/fulu/ssz_static/MatrixEntry", - "tests/.*/eip7441", "tests/.*/eip7732", + "tests/.*/eip7805", + # Ignore MatrixEntry SSZ tests for now. + "tests/.*/fulu/ssz_static/MatrixEntry/.*", + # Ignore full epoch tests for now (just test the sub-transitions). + "tests/.*/.*/epoch_processing/.*/pre_epoch.ssz_snappy", + "tests/.*/.*/epoch_processing/.*/post_epoch.ssz_snappy", ] diff --git a/testing/ef_tests/download_test_vectors.sh b/testing/ef_tests/download_test_vectors.sh new file mode 100755 index 0000000000..7297f7eeb8 --- /dev/null +++ b/testing/ef_tests/download_test_vectors.sh @@ -0,0 +1,68 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +TESTS=("general" "minimal" "mainnet") + +version=${1} +if [[ "$version" == "nightly" ]]; then + if [[ -z "${GITHUB_TOKEN:-}" ]]; then + echo "Error GITHUB_TOKEN is not set" + exit 1 + fi + + for cmd in unzip jq; do + if ! 
command -v "${cmd}" >/dev/null 2>&1; then + echo "Error ${cmd} is not installed" + exit 1 + fi + done + + repo="ethereum/consensus-specs" + api="https://api.github.com" + auth_header="Authorization: token ${GITHUB_TOKEN}" + + run_id=$(curl -s -H "${auth_header}" \ + "${api}/repos/${repo}/actions/workflows/generate_vectors.yml/runs?branch=dev&status=success&per_page=1" | + jq -r '.workflow_runs[0].id') + + if [[ "${run_id}" == "null" || -z "${run_id}" ]]; then + echo "No successful nightly workflow run found" + exit 1 + fi + + echo "Downloading nightly test vectors for run: ${run_id}" + curl -s -H "${auth_header}" "${api}/repos/${repo}/actions/runs/${run_id}/artifacts" | + jq -c '.artifacts[] | {name, url: .archive_download_url}' | + while read -r artifact; do + name=$(echo "${artifact}" | jq -r .name) + url=$(echo "${artifact}" | jq -r .url) + + if [[ "$name" == "consensustestgen.log" ]]; then + continue + fi + + echo "Downloading artifact: ${name}" + curl --progress-bar --location --show-error --retry 3 --retry-all-errors --fail \ + -H "${auth_header}" -H "Accept: application/vnd.github+json" \ + --output "${name}.zip" "${url}" || { + echo "Failed to download ${name}" + exit 1 + } + + unzip -qo "${name}.zip" + rm -f "${name}.zip" + done +else + for test in "${TESTS[@]}"; do + if [[ ! -e "${test}.tar.gz" ]]; then + echo "Downloading: ${version}/${test}.tar.gz" + curl --progress-bar --location --remote-name --show-error --retry 3 --retry-all-errors --fail \ + "https://github.com/ethereum/consensus-spec-tests/releases/download/${version}/${test}.tar.gz" \ + || { + echo "Curl failed. 
Aborting" + rm -f "${test}.tar.gz" + exit 1 + } + fi + done +fi diff --git a/testing/ef_tests/src/cases.rs b/testing/ef_tests/src/cases.rs index 31662e831a..b6f7cb21a1 100644 --- a/testing/ef_tests/src/cases.rs +++ b/testing/ef_tests/src/cases.rs @@ -22,6 +22,7 @@ mod genesis_validity; mod get_custody_groups; mod kzg_blob_to_kzg_commitment; mod kzg_compute_blob_kzg_proof; +mod kzg_compute_cells; mod kzg_compute_cells_and_kzg_proofs; mod kzg_compute_kzg_proof; mod kzg_recover_cells_and_kzg_proofs; @@ -49,7 +50,7 @@ pub use bls_eth_fast_aggregate_verify::*; pub use bls_fast_aggregate_verify::*; pub use bls_sign_msg::*; pub use bls_verify_msg::*; -pub use common::SszStaticType; +pub use common::{DataColumnsByRootIdentifierWrapper, SszStaticType}; pub use compute_columns_for_custody_groups::*; pub use epoch_processing::*; pub use fork::ForkTest; @@ -58,6 +59,7 @@ pub use genesis_validity::*; pub use get_custody_groups::*; pub use kzg_blob_to_kzg_commitment::*; pub use kzg_compute_blob_kzg_proof::*; +pub use kzg_compute_cells::*; pub use kzg_compute_cells_and_kzg_proofs::*; pub use kzg_compute_kzg_proof::*; pub use kzg_recover_cells_and_kzg_proofs::*; @@ -91,29 +93,29 @@ pub use transition::TransitionTest; /// to return `true` for the feature in order for the feature test vector to be tested. #[derive(Debug, PartialEq, Clone, Copy)] pub enum FeatureName { - // TODO(fulu): to be removed once we start using Fulu types for test vectors. - // Existing SSZ types for PeerDAS (Fulu) are the same as Electra, so the test vectors get - // loaded as Electra types (default serde behaviour for untagged enums). - Fulu, + // Placeholder for future feature-gated forks + // Add new feature-gated forks here before they are incorporated into a main fork + #[doc(hidden)] + __Placeholder, } impl FeatureName { pub fn list_all() -> Vec { - vec![FeatureName::Fulu] + vec![] } /// `ForkName` to use when running the feature tests. 
pub fn fork_name(&self) -> ForkName { match self { - FeatureName::Fulu => ForkName::Electra, + FeatureName::__Placeholder => unreachable!("Placeholder variant should never be used"), } } } impl Display for FeatureName { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, _f: &mut Formatter<'_>) -> std::fmt::Result { match self { - FeatureName::Fulu => f.write_str("fulu"), + FeatureName::__Placeholder => unreachable!("Placeholder variant should never be used"), } } } diff --git a/testing/ef_tests/src/cases/common.rs b/testing/ef_tests/src/cases/common.rs index 62f834820f..f63380cc33 100644 --- a/testing/ef_tests/src/cases/common.rs +++ b/testing/ef_tests/src/cases/common.rs @@ -1,8 +1,11 @@ -use serde::Deserialize; +use context_deserialize::ContextDeserialize; +use serde::{Deserialize, Deserializer}; use ssz::Encode; use ssz_derive::{Decode, Encode}; use std::fmt::Debug; -use types::ForkName; +use std::marker::PhantomData; +use tree_hash::TreeHash; +use types::{DataColumnsByRootIdentifier, EthSpec, ForkName, Hash256}; /// Macro to wrap U128 and U256 so they deserialize correctly. macro_rules! uint_wrapper { @@ -40,6 +43,15 @@ macro_rules! uint_wrapper { self.x.tree_hash_root() } } + + impl<'de, T> ContextDeserialize<'de, T> for $wrapper_name { + fn context_deserialize(deserializer: D, _context: T) -> Result + where + D: Deserializer<'de>, + { + <$wrapper_name>::deserialize(deserializer) + } + } }; } @@ -47,26 +59,63 @@ uint_wrapper!(DecimalU128, alloy_primitives::U128); uint_wrapper!(DecimalU256, alloy_primitives::U256); /// Trait for types that can be used in SSZ static tests. -pub trait SszStaticType: - serde::de::DeserializeOwned + Encode + Clone + PartialEq + Debug + Sync -{ +pub trait SszStaticType: Encode + Clone + PartialEq + Debug + Sync {} + +impl SszStaticType for T where T: Encode + Clone + PartialEq + Debug + Sync {} + +/// We need the `EthSpec` to implement `LoadCase` for this type, in order to work out the +/// ChainSpec. 
+/// +/// No other type currently requires this kind of context. +#[derive(Debug, Encode, Clone, PartialEq)] +#[ssz(struct_behaviour = "transparent")] +pub struct DataColumnsByRootIdentifierWrapper { + pub value: DataColumnsByRootIdentifier, + // SSZ derive is a bit buggy and requires skip_deserializing for transparent to work. + #[ssz(skip_serializing, skip_deserializing)] + pub _phantom: PhantomData, } -impl SszStaticType for T where - T: serde::de::DeserializeOwned + Encode + Clone + PartialEq + Debug + Sync +impl<'de, E: EthSpec> ContextDeserialize<'de, (ForkName, usize)> + for DataColumnsByRootIdentifierWrapper { + fn context_deserialize(deserializer: D, context: (ForkName, usize)) -> Result + where + D: Deserializer<'de>, + { + let value = DataColumnsByRootIdentifier::context_deserialize(deserializer, context)?; + Ok(DataColumnsByRootIdentifierWrapper { + value, + _phantom: PhantomData, + }) + } } -/// Return the fork immediately prior to a fork. -pub fn previous_fork(fork_name: ForkName) -> ForkName { - match fork_name { - ForkName::Base => ForkName::Base, - ForkName::Altair => ForkName::Base, - ForkName::Bellatrix => ForkName::Altair, - ForkName::Capella => ForkName::Bellatrix, - ForkName::Deneb => ForkName::Capella, - ForkName::Electra => ForkName::Deneb, - ForkName::Fulu => ForkName::Electra, +// We can delete this if we ever get `tree_hash(struct_behaviour = "transparent")`. 
+impl TreeHash for DataColumnsByRootIdentifierWrapper { + fn tree_hash_type() -> tree_hash::TreeHashType { + DataColumnsByRootIdentifier::tree_hash_type() + } + + fn tree_hash_packed_encoding(&self) -> tree_hash::PackedEncoding { + self.value.tree_hash_packed_encoding() + } + + fn tree_hash_packing_factor() -> usize { + DataColumnsByRootIdentifier::tree_hash_packing_factor() + } + + fn tree_hash_root(&self) -> Hash256 { + self.value.tree_hash_root() + } +} + +impl From for DataColumnsByRootIdentifierWrapper { + fn from(value: DataColumnsByRootIdentifier) -> Self { + Self { + value, + _phantom: PhantomData, + } } } diff --git a/testing/ef_tests/src/cases/fork.rs b/testing/ef_tests/src/cases/fork.rs index 85301e22f6..cae4fcf405 100644 --- a/testing/ef_tests/src/cases/fork.rs +++ b/testing/ef_tests/src/cases/fork.rs @@ -1,6 +1,5 @@ use super::*; use crate::case_result::compare_beacon_state_results_without_caches; -use crate::cases::common::previous_fork; use crate::decode::{ssz_decode_state, yaml_decode_file}; use serde::Deserialize; use state_processing::upgrade::{ @@ -33,7 +32,10 @@ impl LoadCase for ForkTest { assert_eq!(metadata.fork_name(), fork_name); // Decode pre-state with previous fork. - let pre_spec = &previous_fork(fork_name).make_genesis_spec(E::default_spec()); + let pre_spec = &fork_name + .previous_fork() + .unwrap_or(ForkName::Base) + .make_genesis_spec(E::default_spec()); let pre = ssz_decode_state(&path.join("pre.ssz_snappy"), pre_spec)?; // Decode post-state with target fork. 
diff --git a/testing/ef_tests/src/cases/kzg_compute_cells.rs b/testing/ef_tests/src/cases/kzg_compute_cells.rs new file mode 100644 index 0000000000..bd7f3649d6 --- /dev/null +++ b/testing/ef_tests/src/cases/kzg_compute_cells.rs @@ -0,0 +1,54 @@ +use super::*; +use crate::case_result::compare_result; +use kzg::Cell; +use serde::Deserialize; +use std::marker::PhantomData; + +#[derive(Debug, Clone, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct KZGComputeCellsInput { + pub blob: String, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(bound = "E: EthSpec", deny_unknown_fields)] +pub struct KZGComputeCells { + pub input: KZGComputeCellsInput, + pub output: Option>, + #[serde(skip)] + _phantom: PhantomData, +} + +impl LoadCase for KZGComputeCells { + fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result { + decode::yaml_decode_file(path.join("data.yaml").as_path()) + } +} + +impl Case for KZGComputeCells { + fn is_enabled_for_fork(fork_name: ForkName) -> bool { + fork_name.fulu_enabled() + } + + fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> { + let cells = parse_blob::(&self.input.blob) + .and_then(|blob| { + let blob = blob.as_ref().try_into().map_err(|e| { + Error::InternalError(format!("Failed to convert blob to kzg blob: {e:?}")) + })?; + let kzg = get_kzg(); + kzg.compute_cells(blob).map_err(|e| { + Error::InternalError(format!("Failed to compute cells and kzg proofs: {e:?}")) + }) + }) + .map(|cells| cells.to_vec()); + + let expected = self.output.as_ref().map(|cells| { + parse_cells_and_proofs(cells, &[]) + .map(|(cells, _)| cells) + .expect("Valid cells") + }); + + compare_result::, _>(&cells, &expected) + } +} diff --git a/testing/ef_tests/src/cases/operations.rs b/testing/ef_tests/src/cases/operations.rs index 7178edb151..0c7c3d087c 100644 --- a/testing/ef_tests/src/cases/operations.rs +++ b/testing/ef_tests/src/cases/operations.rs @@ -22,10 +22,11 @@ use state_processing::{ ConsensusContext, }; use 
std::fmt::Debug; +use std::path::PathBuf; use types::{ Attestation, AttesterSlashing, BeaconBlock, BeaconBlockBody, BeaconBlockBodyBellatrix, - BeaconBlockBodyCapella, BeaconBlockBodyDeneb, BeaconBlockBodyElectra, BeaconState, - BlindedPayload, ConsolidationRequest, Deposit, DepositRequest, ExecutionPayload, + BeaconBlockBodyCapella, BeaconBlockBodyDeneb, BeaconBlockBodyElectra, BeaconBlockBodyFulu, + BeaconState, BlindedPayload, ConsolidationRequest, Deposit, DepositRequest, ExecutionPayload, ForkVersionDecode, FullPayload, ProposerSlashing, SignedBlsToExecutionChange, SignedVoluntaryExit, SyncAggregate, WithdrawalRequest, }; @@ -49,6 +50,7 @@ pub struct WithdrawalsPayload { #[derive(Debug, Clone)] pub struct Operations> { + path: PathBuf, metadata: Metadata, execution_metadata: Option, pub pre: BeaconState, @@ -357,8 +359,8 @@ impl Operation for BeaconBlockBody> { BeaconBlockBody::Electra(inner.clone_as_blinded()) } ForkName::Fulu => { - let inner = >>::from_ssz_bytes(bytes)?; - BeaconBlockBody::Electra(inner.clone_as_blinded()) + let inner = >>::from_ssz_bytes(bytes)?; + BeaconBlockBody::Fulu(inner.clone_as_blinded()) } _ => panic!(), }) @@ -555,6 +557,7 @@ impl> LoadCase for Operations { }; Ok(Self { + path: path.into(), metadata, execution_metadata, pre, @@ -574,6 +577,17 @@ impl> Case for Operations { } fn result(&self, _case_index: usize, fork_name: ForkName) -> Result<(), Error> { + // FIXME(das): remove this once v1.6.0-alpha.1 is released + // We are ahead of the v1.6.0-alpha.0 spec in our implementation of + // `get_max_blobs_per_block`, so we fail the execution payload test which expects the + // empty blob schedule to generate an error. 
+ if O::handler_name() == "execution_payload" + && fork_name == ForkName::Fulu + && self.path.ends_with("invalid_exceed_max_blobs_per_block") + { + return Err(Error::SkippedKnownFailure); + } + let spec = &testing_spec::(fork_name); let mut pre_state = self.pre.clone(); diff --git a/testing/ef_tests/src/cases/ssz_generic.rs b/testing/ef_tests/src/cases/ssz_generic.rs index 3dc2f17968..96627472ba 100644 --- a/testing/ef_tests/src/cases/ssz_generic.rs +++ b/testing/ef_tests/src/cases/ssz_generic.rs @@ -3,7 +3,9 @@ use super::*; use crate::cases::common::{DecimalU128, DecimalU256, SszStaticType}; use crate::cases::ssz_static::{check_serialization, check_tree_hash}; -use crate::decode::{log_file_access, snappy_decode_file, yaml_decode_file}; +use crate::decode::{context_yaml_decode_file, log_file_access, snappy_decode_file}; +use context_deserialize::ContextDeserialize; +use context_deserialize_derive::context_deserialize; use serde::{de::Error as SerdeError, Deserialize, Deserializer}; use ssz_derive::{Decode, Encode}; use tree_hash::TreeHash; @@ -12,6 +14,7 @@ use types::typenum::*; use types::{BitList, BitVector, FixedVector, ForkName, VariableList, Vector}; #[derive(Debug, Clone, Deserialize)] +#[context_deserialize(ForkName)] struct Metadata { root: String, #[serde(rename(deserialize = "signing_root"))] @@ -118,7 +121,7 @@ macro_rules! 
type_dispatch { } impl Case for SszGeneric { - fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> { + fn result(&self, _case_index: usize, fork_name: ForkName) -> Result<(), Error> { let parts = self.case_name.split('_').collect::>(); match self.handler_name.as_str() { @@ -134,7 +137,7 @@ impl Case for SszGeneric { type_dispatch!( ssz_generic_test, - (&self.path), + (&self.path, fork_name), Vector, <>, [elem_ty => primitive_type] @@ -142,7 +145,7 @@ impl Case for SszGeneric { )?; type_dispatch!( ssz_generic_test, - (&self.path), + (&self.path, fork_name), FixedVector, <>, [elem_ty => primitive_type] @@ -159,7 +162,7 @@ impl Case for SszGeneric { type_dispatch!( ssz_generic_test, - (&self.path), + (&self.path, fork_name), BitList, <>, [limit => typenum] @@ -170,21 +173,21 @@ impl Case for SszGeneric { type_dispatch!( ssz_generic_test, - (&self.path), + (&self.path, fork_name), BitVector, <>, [length => typenum] )?; } "boolean" => { - ssz_generic_test::(&self.path)?; + ssz_generic_test::(&self.path, fork_name)?; } "uints" => { let type_name = "uint".to_owned() + parts[1]; type_dispatch!( ssz_generic_test, - (&self.path), + (&self.path, fork_name), _, <>, [type_name.as_str() => primitive_type] @@ -195,7 +198,7 @@ impl Case for SszGeneric { type_dispatch!( ssz_generic_test, - (&self.path), + (&self.path, fork_name), _, <>, [type_name => test_container] @@ -207,10 +210,15 @@ impl Case for SszGeneric { } } -fn ssz_generic_test(path: &Path) -> Result<(), Error> { +fn ssz_generic_test< + T: SszStaticType + for<'de> ContextDeserialize<'de, ForkName> + TreeHash + ssz::Decode, +>( + path: &Path, + fork_name: ForkName, +) -> Result<(), Error> { let meta_path = path.join("meta.yaml"); let meta: Option = if meta_path.is_file() { - Some(yaml_decode_file(&meta_path)?) + Some(context_yaml_decode_file(&meta_path, fork_name)?) 
} else { None }; @@ -220,7 +228,7 @@ fn ssz_generic_test(path: &Path) -> R let value_path = path.join("value.yaml"); let value: Option = if value_path.is_file() { - Some(yaml_decode_file(&value_path)?) + Some(context_yaml_decode_file(&value_path, fork_name)?) } else { None }; @@ -246,17 +254,20 @@ fn ssz_generic_test(path: &Path) -> R // Containers for SSZ generic tests #[derive(Debug, Clone, Default, PartialEq, Decode, Encode, TreeHash, Deserialize)] +#[context_deserialize(ForkName)] struct SingleFieldTestStruct { A: u8, } #[derive(Debug, Clone, Default, PartialEq, Decode, Encode, TreeHash, Deserialize)] +#[context_deserialize(ForkName)] struct SmallTestStruct { A: u16, B: u16, } #[derive(Debug, Clone, Default, PartialEq, Decode, Encode, TreeHash, Deserialize)] +#[context_deserialize(ForkName)] struct FixedTestStruct { A: u8, B: u64, @@ -264,6 +275,7 @@ struct FixedTestStruct { } #[derive(Debug, Clone, Default, PartialEq, Decode, Encode, TreeHash, Deserialize)] +#[context_deserialize(ForkName)] struct VarTestStruct { A: u16, B: VariableList, @@ -271,6 +283,7 @@ struct VarTestStruct { } #[derive(Debug, Clone, Default, PartialEq, Decode, Encode, TreeHash, Deserialize)] +#[context_deserialize(ForkName)] struct ComplexTestStruct { A: u16, B: VariableList, @@ -283,6 +296,7 @@ struct ComplexTestStruct { } #[derive(Debug, Clone, PartialEq, Decode, Encode, TreeHash, Deserialize)] +#[context_deserialize(ForkName)] struct BitsStruct { A: BitList, B: BitVector, diff --git a/testing/ef_tests/src/cases/ssz_static.rs b/testing/ef_tests/src/cases/ssz_static.rs index c80977a8ac..b02b9597bb 100644 --- a/testing/ef_tests/src/cases/ssz_static.rs +++ b/testing/ef_tests/src/cases/ssz_static.rs @@ -1,10 +1,12 @@ use super::*; use crate::case_result::compare_result; -use crate::decode::{snappy_decode_file, yaml_decode_file}; +use crate::cases::common::DataColumnsByRootIdentifierWrapper; +use crate::decode::{context_yaml_decode_file, snappy_decode_file, yaml_decode_file}; +use 
context_deserialize::ContextDeserialize; use serde::Deserialize; use ssz::Decode; use tree_hash::TreeHash; -use types::{BeaconBlock, BeaconState, Hash256, SignedBeaconBlock}; +use types::{BeaconBlock, BeaconState, DataColumnsByRootIdentifier, Hash256, SignedBeaconBlock}; #[derive(Debug, Clone, Deserialize)] struct SszStaticRoots { @@ -37,18 +39,28 @@ pub struct SszStaticWithSpec { value: T, } -fn load_from_dir(path: &Path) -> Result<(SszStaticRoots, Vec, T), Error> { +fn load_from_dir ContextDeserialize<'de, ForkName>>( + path: &Path, + fork_name: ForkName, +) -> Result<(SszStaticRoots, Vec, T), Error> { + load_from_dir_with_context(path, fork_name) +} + +fn load_from_dir_with_context ContextDeserialize<'de, C>, C>( + path: &Path, + context: C, +) -> Result<(SszStaticRoots, Vec, T), Error> { let roots = yaml_decode_file(&path.join("roots.yaml"))?; let serialized = snappy_decode_file(&path.join("serialized.ssz_snappy")) .expect("serialized.ssz_snappy exists"); - let value = yaml_decode_file(&path.join("value.yaml"))?; + let value = context_yaml_decode_file(&path.join("value.yaml"), context)?; Ok((roots, serialized, value)) } -impl LoadCase for SszStatic { - fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result { - load_from_dir(path).map(|(roots, serialized, value)| Self { +impl ContextDeserialize<'de, ForkName>> LoadCase for SszStatic { + fn load_from_dir(path: &Path, fork_name: ForkName) -> Result { + load_from_dir(path, fork_name).map(|(roots, serialized, value)| Self { roots, serialized, value, @@ -56,19 +68,9 @@ impl LoadCase for SszStatic { } } -impl LoadCase for SszStaticTHC { - fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result { - load_from_dir(path).map(|(roots, serialized, value)| Self { - roots, - serialized, - value, - }) - } -} - -impl LoadCase for SszStaticWithSpec { - fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result { - load_from_dir(path).map(|(roots, serialized, value)| Self { +impl ContextDeserialize<'de, ForkName>> 
LoadCase for SszStaticTHC { + fn load_from_dir(path: &Path, fork_name: ForkName) -> Result { + load_from_dir(path, fork_name).map(|(roots, serialized, value)| Self { roots, serialized, value, @@ -124,6 +126,16 @@ impl Case for SszStaticTHC> { } } +impl LoadCase for SszStaticWithSpec> { + fn load_from_dir(path: &Path, fork_name: ForkName) -> Result { + load_from_dir(path, fork_name).map(|(roots, serialized, value)| Self { + roots, + serialized, + value, + }) + } +} + impl Case for SszStaticWithSpec> { fn result(&self, _case_index: usize, fork_name: ForkName) -> Result<(), Error> { let spec = &testing_spec::(fork_name); @@ -135,6 +147,16 @@ impl Case for SszStaticWithSpec> { } } +impl LoadCase for SszStaticWithSpec> { + fn load_from_dir(path: &Path, fork_name: ForkName) -> Result { + load_from_dir(path, fork_name).map(|(roots, serialized, value)| Self { + roots, + serialized, + value, + }) + } +} + impl Case for SszStaticWithSpec> { fn result(&self, _case_index: usize, fork_name: ForkName) -> Result<(), Error> { let spec = &testing_spec::(fork_name); @@ -145,3 +167,27 @@ impl Case for SszStaticWithSpec> { Ok(()) } } + +impl LoadCase for SszStaticWithSpec> { + fn load_from_dir(path: &Path, fork_name: ForkName) -> Result { + let spec = &testing_spec::(fork_name); + let context = (fork_name, spec.number_of_columns as usize); + load_from_dir_with_context(path, context).map(|(roots, serialized, value)| Self { + roots, + serialized, + value, + }) + } +} + +impl Case for SszStaticWithSpec> { + fn result(&self, _case_index: usize, fork_name: ForkName) -> Result<(), Error> { + let spec = &testing_spec::(fork_name); + check_serialization(&self.value, &self.serialized, |bytes| { + DataColumnsByRootIdentifier::from_ssz_bytes(bytes, spec.number_of_columns as usize) + .map(Into::into) + })?; + check_tree_hash(&self.roots.root, self.value.tree_hash_root().as_slice())?; + Ok(()) + } +} diff --git a/testing/ef_tests/src/decode.rs b/testing/ef_tests/src/decode.rs index 
eb88ac6af1..2074ffce23 100644 --- a/testing/ef_tests/src/decode.rs +++ b/testing/ef_tests/src/decode.rs @@ -1,4 +1,5 @@ use super::*; +use context_deserialize::ContextDeserialize; use fs2::FileExt; use snap::raw::Decoder; use std::fs::{self}; @@ -35,6 +36,27 @@ pub fn yaml_decode(string: &str) -> Result(string: &'de str, context: C) -> Result +where + T: ContextDeserialize<'de, C>, +{ + let deserializer = serde_yaml::Deserializer::from_str(string); + T::context_deserialize(deserializer, context) + .map_err(|e| Error::FailedToParseTest(format!("{:?}", e))) +} + +pub fn context_yaml_decode_file(path: &Path, context: C) -> Result +where + T: for<'de> ContextDeserialize<'de, C>, +{ + log_file_access(path); + fs::read_to_string(path) + .map_err(|e| { + Error::FailedToParseTest(format!("Unable to load {}: {:?}", path.display(), e)) + }) + .and_then(|s| context_yaml_decode(&s, context)) +} + pub fn yaml_decode_file(path: &Path) -> Result { log_file_access(path); fs::read_to_string(path) diff --git a/testing/ef_tests/src/handler.rs b/testing/ef_tests/src/handler.rs index a375498239..fd2bea6e8e 100644 --- a/testing/ef_tests/src/handler.rs +++ b/testing/ef_tests/src/handler.rs @@ -1,6 +1,7 @@ use crate::cases::{self, Case, Cases, EpochTransition, LoadCase, Operation}; use crate::type_name::TypeName; use crate::{type_name, FeatureName}; +use context_deserialize::ContextDeserialize; use derivative::Derivative; use std::fs::{self, DirEntry}; use std::marker::PhantomData; @@ -21,7 +22,7 @@ pub trait Handler { // Add forks here to exclude them from EF spec testing. Helpful for adding future or // unspecified forks. fn disabled_forks(&self) -> Vec { - vec![ForkName::Fulu] + vec![] } fn is_enabled_for_fork(&self, fork_name: ForkName) -> bool { @@ -50,6 +51,19 @@ pub trait Handler { } } + // Do NOT override this function. + // TODO: use default keyword when stable. 
+ fn rayon_enabled() -> bool { + #[cfg(feature = "disable_rayon")] + { + false + } + #[cfg(not(feature = "disable_rayon"))] + { + Self::use_rayon() + } + } + fn use_rayon() -> bool { true } @@ -85,7 +99,7 @@ pub trait Handler { }) .collect(); - let results = Cases { test_cases }.test_results(fork_name, Self::use_rayon()); + let results = Cases { test_cases }.test_results(fork_name, Self::rayon_enabled()); let name = format!( "{}/{}/{}", @@ -127,7 +141,7 @@ pub trait Handler { }) .collect(); - let results = Cases { test_cases }.test_results(fork_name, Self::use_rayon()); + let results = Cases { test_cases }.test_results(fork_name, Self::rayon_enabled()); let name = format!( "{}/{}/{}", @@ -205,7 +219,7 @@ macro_rules! bls_handler { }) .collect(); - let results = Cases { test_cases }.test_results(fork_name, Self::use_rayon()); + let results = Cases { test_cases }.test_results(fork_name, Self::rayon_enabled()); let name = format!( "{}/{}/{}", @@ -327,13 +341,37 @@ impl SszStaticHandler { pub struct SszStaticTHCHandler(PhantomData<(T, E)>); /// Handler for SSZ types that don't implement `ssz::Decode`. 
-#[derive(Derivative)] -#[derivative(Default(bound = ""))] -pub struct SszStaticWithSpecHandler(PhantomData<(T, E)>); +pub struct SszStaticWithSpecHandler { + supported_forks: Vec, + _phantom: PhantomData<(T, E)>, +} + +impl Default for SszStaticWithSpecHandler { + fn default() -> Self { + Self::for_forks(ForkName::list_all()) + } +} + +impl SszStaticWithSpecHandler { + pub fn for_forks(supported_forks: Vec) -> Self { + SszStaticWithSpecHandler { + supported_forks, + _phantom: PhantomData, + } + } + + pub fn fulu_and_later() -> Self { + Self::for_forks(ForkName::list_all()[6..].to_vec()) + } +} impl Handler for SszStaticHandler where - T: cases::SszStaticType + tree_hash::TreeHash + ssz::Decode + TypeName, + T: cases::SszStaticType + + for<'de> ContextDeserialize<'de, ForkName> + + tree_hash::TreeHash + + ssz::Decode + + TypeName, E: TypeName, { type Case = cases::SszStatic; @@ -353,25 +391,6 @@ where fn is_enabled_for_fork(&self, fork_name: ForkName) -> bool { self.supported_forks.contains(&fork_name) } - - fn is_enabled_for_feature(&self, feature_name: FeatureName) -> bool { - // TODO(fulu): to be removed once Fulu types start differing from Electra. We currently run Fulu tests as a - // "feature" - this means we use Electra types for Fulu SSZ tests (except for PeerDAS types, e.g. `DataColumnSidecar`). - // - // This ensures we only run the tests **once** for `Fulu`, using the types matching the - // correct fork, e.g. `Fulu` uses SSZ types from `Electra` as of spec test version - // `v1.5.0-beta.0`, therefore the `Fulu` tests should get included when testing Deneb types. - // - // e.g. 
Fulu test vectors are executed in the 2nd line below, but excluded in the 1st - // line when testing the type `AttestationElectra`: - // - // ``` - // SszStaticHandler::, MainnetEthSpec>::pre_electra().run(); - // SszStaticHandler::, MainnetEthSpec>::electra_only().run(); - // ``` - feature_name == FeatureName::Fulu - && self.supported_forks.contains(&feature_name.fork_name()) - } } impl Handler for SszStaticTHCHandler, E> @@ -391,10 +410,6 @@ where fn handler_name(&self) -> String { BeaconState::::name().into() } - - fn is_enabled_for_feature(&self, feature_name: FeatureName) -> bool { - feature_name == FeatureName::Fulu - } } impl Handler for SszStaticWithSpecHandler @@ -417,8 +432,8 @@ where T::name().into() } - fn is_enabled_for_feature(&self, feature_name: FeatureName) -> bool { - feature_name == FeatureName::Fulu + fn is_enabled_for_fork(&self, fork_name: ForkName) -> bool { + self.supported_forks.contains(&fork_name) } } @@ -898,10 +913,6 @@ impl Handler for GetCustodyGroupsHandler { fn handler_name(&self) -> String { "get_custody_groups".into() } - - fn is_enabled_for_feature(&self, feature_name: FeatureName) -> bool { - feature_name == FeatureName::Fulu - } } #[derive(Derivative)] @@ -922,9 +933,25 @@ impl Handler for ComputeColumnsForCustodyGroupHandler fn handler_name(&self) -> String { "compute_columns_for_custody_group".into() } +} - fn is_enabled_for_feature(&self, feature_name: FeatureName) -> bool { - feature_name == FeatureName::Fulu +#[derive(Derivative)] +#[derivative(Default(bound = ""))] +pub struct KZGComputeCellsHandler(PhantomData); + +impl Handler for KZGComputeCellsHandler { + type Case = cases::KZGComputeCells; + + fn config_name() -> &'static str { + "general" + } + + fn runner_name() -> &'static str { + "kzg" + } + + fn handler_name(&self) -> String { + "compute_cells".into() } } @@ -946,10 +973,6 @@ impl Handler for KZGComputeCellsAndKZGProofHandler { fn handler_name(&self) -> String { "compute_cells_and_kzg_proofs".into() } - - fn 
is_enabled_for_feature(&self, feature_name: FeatureName) -> bool { - feature_name == FeatureName::Fulu - } } #[derive(Derivative)] @@ -970,10 +993,6 @@ impl Handler for KZGVerifyCellKZGProofBatchHandler { fn handler_name(&self) -> String { "verify_cell_kzg_proof_batch".into() } - - fn is_enabled_for_feature(&self, feature_name: FeatureName) -> bool { - feature_name == FeatureName::Fulu - } } #[derive(Derivative)] @@ -994,10 +1013,6 @@ impl Handler for KZGRecoverCellsAndKZGProofHandler { fn handler_name(&self) -> String { "recover_cells_and_kzg_proofs".into() } - - fn is_enabled_for_feature(&self, feature_name: FeatureName) -> bool { - feature_name == FeatureName::Fulu - } } #[derive(Derivative)] @@ -1022,10 +1037,6 @@ impl Handler for KzgInclusionMerkleProofValidityHandler bool { fork_name.deneb_enabled() } - - fn is_enabled_for_feature(&self, feature_name: FeatureName) -> bool { - feature_name == FeatureName::Fulu - } } #[derive(Derivative)] @@ -1073,7 +1084,8 @@ impl Handler for LightClientUpdateHandler { fn is_enabled_for_fork(&self, fork_name: ForkName) -> bool { // Enabled in Altair - fork_name.altair_enabled() + // No test in Fulu yet. 
+ fork_name.altair_enabled() && fork_name != ForkName::Fulu } } diff --git a/testing/ef_tests/src/lib.rs b/testing/ef_tests/src/lib.rs index e7367719d7..de255c2c73 100644 --- a/testing/ef_tests/src/lib.rs +++ b/testing/ef_tests/src/lib.rs @@ -1,11 +1,11 @@ pub use case_result::CaseResult; pub use cases::WithdrawalsPayload; pub use cases::{ - Case, EffectiveBalanceUpdates, Eth1DataReset, FeatureName, HistoricalRootsUpdate, - HistoricalSummariesUpdate, InactivityUpdates, JustificationAndFinalization, - ParticipationFlagUpdates, ParticipationRecordUpdates, PendingBalanceDeposits, - PendingConsolidations, RandaoMixesReset, RegistryUpdates, RewardsAndPenalties, Slashings, - SlashingsReset, SyncCommitteeUpdates, + Case, DataColumnsByRootIdentifierWrapper, EffectiveBalanceUpdates, Eth1DataReset, FeatureName, + HistoricalRootsUpdate, HistoricalSummariesUpdate, InactivityUpdates, + JustificationAndFinalization, ParticipationFlagUpdates, ParticipationRecordUpdates, + PendingBalanceDeposits, PendingConsolidations, RandaoMixesReset, RegistryUpdates, + RewardsAndPenalties, Slashings, SlashingsReset, SyncCommitteeUpdates, }; pub use decode::log_file_access; pub use error::Error; diff --git a/testing/ef_tests/src/type_name.rs b/testing/ef_tests/src/type_name.rs index 387e77310d..b5b2c424d8 100644 --- a/testing/ef_tests/src/type_name.rs +++ b/testing/ef_tests/src/type_name.rs @@ -1,4 +1,5 @@ //! Mapping from types to canonical string identifiers used in testing. 
+use crate::DataColumnsByRootIdentifierWrapper; use types::historical_summary::HistoricalSummary; use types::*; @@ -59,6 +60,10 @@ type_name!(BeaconBlockHeader); type_name_generic!(BeaconState); type_name!(BlobIdentifier); type_name!(DataColumnsByRootIdentifier); +type_name_generic!( + DataColumnsByRootIdentifierWrapper, + "DataColumnsByRootIdentifier" +); type_name_generic!(BlobSidecar); type_name_generic!(DataColumnSidecar); type_name!(Checkpoint); diff --git a/testing/ef_tests/tests/tests.rs b/testing/ef_tests/tests/tests.rs index d333cdbb11..8842ec2852 100644 --- a/testing/ef_tests/tests/tests.rs +++ b/testing/ef_tests/tests/tests.rs @@ -238,7 +238,8 @@ macro_rules! ssz_static_test_no_run { #[cfg(feature = "fake_crypto")] mod ssz_static { use ef_tests::{ - FeatureName, Handler, SszStaticHandler, SszStaticTHCHandler, SszStaticWithSpecHandler, + DataColumnsByRootIdentifierWrapper, Handler, SszStaticHandler, SszStaticTHCHandler, + SszStaticWithSpecHandler, }; use types::historical_summary::HistoricalSummary; use types::{ @@ -660,20 +661,24 @@ mod ssz_static { #[test] fn data_column_sidecar() { - SszStaticHandler::, MinimalEthSpec>::default() - .run_for_feature(FeatureName::Fulu); - SszStaticHandler::, MainnetEthSpec>::default() - .run_for_feature(FeatureName::Fulu); + SszStaticHandler::, MinimalEthSpec>::fulu_and_later() + .run(); + SszStaticHandler::, MainnetEthSpec>::fulu_and_later() + .run(); } #[test] - #[ignore] - // TODO(das): enable once EF tests are updated to latest release. 
fn data_column_by_root_identifier() { - // SszStaticHandler::::default() - // .run_for_feature(FeatureName::Fulu); - // SszStaticHandler::::default() - // .run_for_feature(FeatureName::Fulu); + SszStaticWithSpecHandler::< + DataColumnsByRootIdentifierWrapper, + MinimalEthSpec, + >::fulu_and_later() + .run(); + SszStaticWithSpecHandler::< + DataColumnsByRootIdentifierWrapper, + MainnetEthSpec, + >::fulu_and_later() + .run(); } #[test] @@ -941,6 +946,11 @@ fn kzg_verify_kzg_proof() { KZGVerifyKZGProofHandler::::default().run(); } +#[test] +fn kzg_compute_cells() { + KZGComputeCellsHandler::::default().run(); +} + #[test] fn kzg_compute_cells_and_proofs() { KZGComputeCellsAndKZGProofHandler::::default().run(); diff --git a/validator_manager/src/create_validators.rs b/validator_manager/src/create_validators.rs index c21ebeabf8..3216417c73 100644 --- a/validator_manager/src/create_validators.rs +++ b/validator_manager/src/create_validators.rs @@ -596,7 +596,7 @@ pub mod tests { type E = MainnetEthSpec; - const TEST_VECTOR_DEPOSIT_CLI_VERSION: &str = "2.7.0"; + const TEST_VECTOR_DEPOSIT_CLI_VERSION: &str = "1.2.2"; // Update to ethstaker-deposit-cli version fn junk_execution_address() -> Option
{ Some(Address::from_str("0x0f51bb10119727a7e5ea3538074fb341f56b09ad").unwrap()) @@ -882,7 +882,7 @@ pub mod tests { } #[tokio::test] - async fn staking_deposit_cli_vectors() { + async fn ethstaker_deposit_cli_vectors() { let vectors_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("test_vectors") .join("vectors"); diff --git a/validator_manager/test_vectors/generate.py b/validator_manager/test_vectors/generate.py index 45bb408eb1..4f584bd876 100644 --- a/validator_manager/test_vectors/generate.py +++ b/validator_manager/test_vectors/generate.py @@ -71,9 +71,9 @@ def setup_sdc(): ], cwd=sdc_git_dir) assert(result.returncode == 0) result = subprocess.run([ - "python", - "setup.py", + "pip", "install", + ".", ], cwd=sdc_git_dir) assert(result.returncode == 0) @@ -100,7 +100,9 @@ def sdc_generate(network, first_index, count, eth1_withdrawal_address=None): '--num_validators', str(count), '--mnemonic', TEST_MNEMONIC, '--chain', network, - '--keystore_password', 'MyPassword', + '--keystore_password', 'MyPassword1234', # minimum 12 characters for password + '--withdrawal_address', '', # no withdrawal address set so it maintains 0x00 withdrawal credentials + '--regular-withdrawal', # no compounding '--folder', os.path.abspath(output_dir), ] + eth1_flags diff --git a/validator_manager/test_vectors/vectors/holesky_first_0_count_1_eth1_false/validator_keys/deposit_data-1715584111.json b/validator_manager/test_vectors/vectors/holesky_first_0_count_1_eth1_false/validator_keys/deposit_data-1748939223.json similarity index 90% rename from validator_manager/test_vectors/vectors/holesky_first_0_count_1_eth1_false/validator_keys/deposit_data-1715584111.json rename to validator_manager/test_vectors/vectors/holesky_first_0_count_1_eth1_false/validator_keys/deposit_data-1748939223.json index 6b343d087a..b2c6085197 100644 --- a/validator_manager/test_vectors/vectors/holesky_first_0_count_1_eth1_false/validator_keys/deposit_data-1715584111.json +++ 
b/validator_manager/test_vectors/vectors/holesky_first_0_count_1_eth1_false/validator_keys/deposit_data-1748939223.json @@ -1 +1 @@ -[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0049b6188ed20314309f617dd4030b8ddfac3c6e65759a03c226a13b2fe4cc72", "amount": 32000000000, "signature": "846c83b1ec80038974ded0ef5b89d86c862a7bd4559c10528cd4bb6a48e71987f17a963bc6165a6f51c8b87474e64b450b549ce2d14a25bea3c86c241f3740f3d3edc3dc36fddbeadb1ec8969d7193da602270fea8dd31d3e64674aa2090b73d", "deposit_message_root": "a9bc1d21cc009d9b10782a07213e37592c0d235463ed0117dec755758da90d51", "deposit_data_root": "cdfe14518026e99b9dfa8a029054349e37d4632ee2bbed7c2f5af19a01912368", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0049b6188ed20314309f617dd4030b8ddfac3c6e65759a03c226a13b2fe4cc72", "amount": 32000000000, "signature": "846c83b1ec80038974ded0ef5b89d86c862a7bd4559c10528cd4bb6a48e71987f17a963bc6165a6f51c8b87474e64b450b549ce2d14a25bea3c86c241f3740f3d3edc3dc36fddbeadb1ec8969d7193da602270fea8dd31d3e64674aa2090b73d", "deposit_message_root": "a9bc1d21cc009d9b10782a07213e37592c0d235463ed0117dec755758da90d51", "deposit_data_root": "cdfe14518026e99b9dfa8a029054349e37d4632ee2bbed7c2f5af19a01912368", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_false/validator_keys/deposit_data-1715584114.json b/validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_false/validator_keys/deposit_data-1748939227.json similarity index 90% rename from 
validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_false/validator_keys/deposit_data-1715584114.json rename to validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_false/validator_keys/deposit_data-1748939227.json index f70410746b..e12b813e3c 100644 --- a/validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_false/validator_keys/deposit_data-1715584114.json +++ b/validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_false/validator_keys/deposit_data-1748939227.json @@ -1 +1 @@ -[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0049b6188ed20314309f617dd4030b8ddfac3c6e65759a03c226a13b2fe4cc72", "amount": 32000000000, "signature": "846c83b1ec80038974ded0ef5b89d86c862a7bd4559c10528cd4bb6a48e71987f17a963bc6165a6f51c8b87474e64b450b549ce2d14a25bea3c86c241f3740f3d3edc3dc36fddbeadb1ec8969d7193da602270fea8dd31d3e64674aa2090b73d", "deposit_message_root": "a9bc1d21cc009d9b10782a07213e37592c0d235463ed0117dec755758da90d51", "deposit_data_root": "cdfe14518026e99b9dfa8a029054349e37d4632ee2bbed7c2f5af19a01912368", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}, {"pubkey": "a33ab9d93fb53c4f027944aaa11a13be0c150b7cc2e379d85d1ed4db38d178b4e4ebeae05832158b8c746c1961da00ce", "withdrawal_credentials": "00ad3748cbd1adc855c2bdab431f7e755a21663f4f6447ac888e5855c588af5a", "amount": 32000000000, "signature": "997cff67c1675ecd2467ac050850ddec8b0488995abf363cee40cbe1461043acf4e68422e9731340437d566542e010cd186031dc0de30b2f56d19f3bb866e0fa9be31dd49ea27777f25ad786cc8587fb745598e5870647b6deeaab77fba4a9e4", "deposit_message_root": "c5271aba974c802ff5b02b11fa33b545d7f430ff3b85c0f9eeef4cd59d83abf3", "deposit_data_root": "8787f86d699426783983d03945a8ebe45b349118d28e8af528b9695887f98fac", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": 
"88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0049b6188ed20314309f617dd4030b8ddfac3c6e65759a03c226a13b2fe4cc72", "amount": 32000000000, "signature": "846c83b1ec80038974ded0ef5b89d86c862a7bd4559c10528cd4bb6a48e71987f17a963bc6165a6f51c8b87474e64b450b549ce2d14a25bea3c86c241f3740f3d3edc3dc36fddbeadb1ec8969d7193da602270fea8dd31d3e64674aa2090b73d", "deposit_message_root": "a9bc1d21cc009d9b10782a07213e37592c0d235463ed0117dec755758da90d51", "deposit_data_root": "cdfe14518026e99b9dfa8a029054349e37d4632ee2bbed7c2f5af19a01912368", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}, {"pubkey": "a33ab9d93fb53c4f027944aaa11a13be0c150b7cc2e379d85d1ed4db38d178b4e4ebeae05832158b8c746c1961da00ce", "withdrawal_credentials": "00ad3748cbd1adc855c2bdab431f7e755a21663f4f6447ac888e5855c588af5a", "amount": 32000000000, "signature": "997cff67c1675ecd2467ac050850ddec8b0488995abf363cee40cbe1461043acf4e68422e9731340437d566542e010cd186031dc0de30b2f56d19f3bb866e0fa9be31dd49ea27777f25ad786cc8587fb745598e5870647b6deeaab77fba4a9e4", "deposit_message_root": "c5271aba974c802ff5b02b11fa33b545d7f430ff3b85c0f9eeef4cd59d83abf3", "deposit_data_root": "8787f86d699426783983d03945a8ebe45b349118d28e8af528b9695887f98fac", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_true/validator_keys/deposit_data-1715584129.json b/validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_true/validator_keys/deposit_data-1748939246.json similarity index 90% rename from validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_true/validator_keys/deposit_data-1715584129.json rename to validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_true/validator_keys/deposit_data-1748939246.json index 9b2678651f..bdb31d8bf2 100644 --- 
a/validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_true/validator_keys/deposit_data-1715584129.json +++ b/validator_manager/test_vectors/vectors/holesky_first_0_count_2_eth1_true/validator_keys/deposit_data-1748939246.json @@ -1 +1 @@ -[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0100000000000000000000000f51bb10119727a7e5ea3538074fb341f56b09ad", "amount": 32000000000, "signature": "a8eed5bb34dec5fdee4a3e68a774143072af0ebdae26a9b24ea0601d516a5eeb18aa2ec804be3f05f8475f2e472ce91809d93b7586c3a90fc8a7bbb63ad1f762eee3df0dc0ea3d33dd8ba782e48de495b3bc76e280658c1406e11d07db659e69", "deposit_message_root": "62967565d11471da4af7769911926cd1826124048036b25616216f99bc320f13", "deposit_data_root": "74ead0279baa86ed7106268e4806484eaae26a8f1c42f693e4b3cb626c724b63", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}, {"pubkey": "a33ab9d93fb53c4f027944aaa11a13be0c150b7cc2e379d85d1ed4db38d178b4e4ebeae05832158b8c746c1961da00ce", "withdrawal_credentials": "0100000000000000000000000f51bb10119727a7e5ea3538074fb341f56b09ad", "amount": 32000000000, "signature": "8d87cdd627ed169114c00653fd3167e2afc917010071bbbbddd60e331ed0d0d7273cb4a887efe63e7b840bac713420d907e9dac20df56e50e7346b59e3acfe56753234a34c7ab3d8c40ea00b447db005b4b780701a0a2416c4fdadbdb18bf174", "deposit_message_root": "ce110433298ffb78d827d67dcc13655344a139cb7e3ce10b341937c0a76b25b7", "deposit_data_root": "978b04b76d0a56ff28beb8eb1859792e0967d0b51e4a31485d2078b8390954d2", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0100000000000000000000000f51bb10119727a7e5ea3538074fb341f56b09ad", "amount": 32000000000, "signature": 
"a8eed5bb34dec5fdee4a3e68a774143072af0ebdae26a9b24ea0601d516a5eeb18aa2ec804be3f05f8475f2e472ce91809d93b7586c3a90fc8a7bbb63ad1f762eee3df0dc0ea3d33dd8ba782e48de495b3bc76e280658c1406e11d07db659e69", "deposit_message_root": "62967565d11471da4af7769911926cd1826124048036b25616216f99bc320f13", "deposit_data_root": "74ead0279baa86ed7106268e4806484eaae26a8f1c42f693e4b3cb626c724b63", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}, {"pubkey": "a33ab9d93fb53c4f027944aaa11a13be0c150b7cc2e379d85d1ed4db38d178b4e4ebeae05832158b8c746c1961da00ce", "withdrawal_credentials": "0100000000000000000000000f51bb10119727a7e5ea3538074fb341f56b09ad", "amount": 32000000000, "signature": "8d87cdd627ed169114c00653fd3167e2afc917010071bbbbddd60e331ed0d0d7273cb4a887efe63e7b840bac713420d907e9dac20df56e50e7346b59e3acfe56753234a34c7ab3d8c40ea00b447db005b4b780701a0a2416c4fdadbdb18bf174", "deposit_message_root": "ce110433298ffb78d827d67dcc13655344a139cb7e3ce10b341937c0a76b25b7", "deposit_data_root": "978b04b76d0a56ff28beb8eb1859792e0967d0b51e4a31485d2078b8390954d2", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/holesky_first_1024_count_3_eth1_false/validator_keys/deposit_data-1715584124.json b/validator_manager/test_vectors/vectors/holesky_first_1024_count_3_eth1_false/validator_keys/deposit_data-1748939241.json similarity index 87% rename from validator_manager/test_vectors/vectors/holesky_first_1024_count_3_eth1_false/validator_keys/deposit_data-1715584124.json rename to validator_manager/test_vectors/vectors/holesky_first_1024_count_3_eth1_false/validator_keys/deposit_data-1748939241.json index 997260bb87..aa7b311ef9 100644 --- a/validator_manager/test_vectors/vectors/holesky_first_1024_count_3_eth1_false/validator_keys/deposit_data-1715584124.json +++ 
b/validator_manager/test_vectors/vectors/holesky_first_1024_count_3_eth1_false/validator_keys/deposit_data-1748939241.json @@ -1 +1 @@ -[{"pubkey": "92ca8dddba4ae7ada6584c377fc53fb978ad9d5ee8db585b18e226c27682b326b3c68e10f5d99a453e233268c144e0ef", "withdrawal_credentials": "00dd4f8bfd1a48be288c2af8bb7315f6198900b5b3f56df010420d5328e682cb", "amount": 32000000000, "signature": "818141f1f2fdba651f6a3de4ed43c774974b6cec82b3e6c3fa00569b6b67a88c37742d0033275dc98b4bbaac875e48b416b89cebfd1fe9996e2a29c0a2c512d1cedff558420a1a2b50cf5c743a622d85d941b896b00520b3e9a3eaf1f5eff12c", "deposit_message_root": "5421d9177b4d035e6525506509ab702c5f458c53458dad437097b37cb8209b43", "deposit_data_root": "9c9f6ed171b93a08f4e1bc46c0a7feace6466e3e213c6c2d567428c73e22e242", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}, {"pubkey": "86474cd2874663445ef0ee02aca81b2b942a383fd4c7085fa675388e26c67afc0fef44a8666d46f571723e349ae4a0cb", "withdrawal_credentials": "001c31aa161ed1d3c481c1ee8f3ad1853217296a15877917fe3c2f680580ac01", "amount": 32000000000, "signature": "b62103a32290ec8c710d48f3147895a2dddb25231c9ae38b8ca12bcaf30770a9fc632f4da6b3c5b7a43cfa6a9f096f5e13d26b2c68a42c1c86385aea268dcd2ad3cf766b3f01ee2ba19379ddae9c15830aac8acbef20accc82c734f4c40e5ffd", "deposit_message_root": "279271f7065c83868c37021c32c014516b21e6188fb2cee4e8543c5d38427698", "deposit_data_root": "37b75d75086f4b980c85c021ca22343008d445061714cff41d63aea4dca49a5f", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}, {"pubkey": "997e27aa262238beb01464434694a466321b5270297bdfdb944b65a3b6617b6ce2613628ac35a8f4cf2e9b4b55c46ef8", "withdrawal_credentials": "0097fffee9cf9fd91a6fa89af90e73f1cb8b8a043e742afaeb2e57b83b0845fe", "amount": 32000000000, "signature": "af2dc295084b4a3eff01a52fe5d42aa931509c24328d5304e59026d0957b55bc35e64802a8d64fdb4a9700bf12e1d6bb184eba01682d8413d86b737e63d3d79a16243d9c8e00115a202efc889ef7129861d8aa32bf8ec9ef5305eecce87b2eda", 
"deposit_message_root": "187e177721bfdd8ea13cb52c8de2dead29164a0e093efb640457a0e6ac918191", "deposit_data_root": "fd0c081818d2ce1bc54b7979e9b348bbbdb8fe5904694143bf4b355dcbbde692", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "92ca8dddba4ae7ada6584c377fc53fb978ad9d5ee8db585b18e226c27682b326b3c68e10f5d99a453e233268c144e0ef", "withdrawal_credentials": "00dd4f8bfd1a48be288c2af8bb7315f6198900b5b3f56df010420d5328e682cb", "amount": 32000000000, "signature": "818141f1f2fdba651f6a3de4ed43c774974b6cec82b3e6c3fa00569b6b67a88c37742d0033275dc98b4bbaac875e48b416b89cebfd1fe9996e2a29c0a2c512d1cedff558420a1a2b50cf5c743a622d85d941b896b00520b3e9a3eaf1f5eff12c", "deposit_message_root": "5421d9177b4d035e6525506509ab702c5f458c53458dad437097b37cb8209b43", "deposit_data_root": "9c9f6ed171b93a08f4e1bc46c0a7feace6466e3e213c6c2d567428c73e22e242", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}, {"pubkey": "86474cd2874663445ef0ee02aca81b2b942a383fd4c7085fa675388e26c67afc0fef44a8666d46f571723e349ae4a0cb", "withdrawal_credentials": "001c31aa161ed1d3c481c1ee8f3ad1853217296a15877917fe3c2f680580ac01", "amount": 32000000000, "signature": "b62103a32290ec8c710d48f3147895a2dddb25231c9ae38b8ca12bcaf30770a9fc632f4da6b3c5b7a43cfa6a9f096f5e13d26b2c68a42c1c86385aea268dcd2ad3cf766b3f01ee2ba19379ddae9c15830aac8acbef20accc82c734f4c40e5ffd", "deposit_message_root": "279271f7065c83868c37021c32c014516b21e6188fb2cee4e8543c5d38427698", "deposit_data_root": "37b75d75086f4b980c85c021ca22343008d445061714cff41d63aea4dca49a5f", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}, {"pubkey": "997e27aa262238beb01464434694a466321b5270297bdfdb944b65a3b6617b6ce2613628ac35a8f4cf2e9b4b55c46ef8", "withdrawal_credentials": "0097fffee9cf9fd91a6fa89af90e73f1cb8b8a043e742afaeb2e57b83b0845fe", "amount": 32000000000, "signature": 
"af2dc295084b4a3eff01a52fe5d42aa931509c24328d5304e59026d0957b55bc35e64802a8d64fdb4a9700bf12e1d6bb184eba01682d8413d86b737e63d3d79a16243d9c8e00115a202efc889ef7129861d8aa32bf8ec9ef5305eecce87b2eda", "deposit_message_root": "187e177721bfdd8ea13cb52c8de2dead29164a0e093efb640457a0e6ac918191", "deposit_data_root": "fd0c081818d2ce1bc54b7979e9b348bbbdb8fe5904694143bf4b355dcbbde692", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/holesky_first_12_count_1_eth1_false/validator_keys/deposit_data-1715584117.json b/validator_manager/test_vectors/vectors/holesky_first_12_count_1_eth1_false/validator_keys/deposit_data-1748939232.json similarity index 90% rename from validator_manager/test_vectors/vectors/holesky_first_12_count_1_eth1_false/validator_keys/deposit_data-1715584117.json rename to validator_manager/test_vectors/vectors/holesky_first_12_count_1_eth1_false/validator_keys/deposit_data-1748939232.json index 4fa3724c59..344bc8e5c0 100644 --- a/validator_manager/test_vectors/vectors/holesky_first_12_count_1_eth1_false/validator_keys/deposit_data-1715584117.json +++ b/validator_manager/test_vectors/vectors/holesky_first_12_count_1_eth1_false/validator_keys/deposit_data-1748939232.json @@ -1 +1 @@ -[{"pubkey": "8b181759a027c09a409ef24f6b35db213982c2474e2017f3851d76b1c4e560a4238072f67a0c22cb667f940da4ea9ec9", "withdrawal_credentials": "00cbec90e8570679f565bd4645f73a078981067a705564283e61c93c81707842", "amount": 32000000000, "signature": "b687aa7d55752f00a060c21fa9287485bab94c841d96b3516263fb384a812c92e60ef9fa2e09add9f55db71961fc051e0bb83d214b6f31d04ee59eaba3b43e27eadd2a64884c5d4125a1f5bd6e1d930e5a1e420c278c697d4af6ed3fcdac16cf", "deposit_message_root": "fcdf3d94740766299a95b3e477e64abadff6ab8978400578f241c93eb367b938", "deposit_data_root": "54dc56d2838ca70bac89ca92ae1f8d04945d3305ce8507b390756b646163387a", "fork_version": "01017000", "network_name": "holesky", 
"deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "8b181759a027c09a409ef24f6b35db213982c2474e2017f3851d76b1c4e560a4238072f67a0c22cb667f940da4ea9ec9", "withdrawal_credentials": "00cbec90e8570679f565bd4645f73a078981067a705564283e61c93c81707842", "amount": 32000000000, "signature": "b687aa7d55752f00a060c21fa9287485bab94c841d96b3516263fb384a812c92e60ef9fa2e09add9f55db71961fc051e0bb83d214b6f31d04ee59eaba3b43e27eadd2a64884c5d4125a1f5bd6e1d930e5a1e420c278c697d4af6ed3fcdac16cf", "deposit_message_root": "fcdf3d94740766299a95b3e477e64abadff6ab8978400578f241c93eb367b938", "deposit_data_root": "54dc56d2838ca70bac89ca92ae1f8d04945d3305ce8507b390756b646163387a", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/holesky_first_99_count_2_eth1_false/validator_keys/deposit_data-1715584120.json b/validator_manager/test_vectors/vectors/holesky_first_99_count_2_eth1_false/validator_keys/deposit_data-1748939236.json similarity index 90% rename from validator_manager/test_vectors/vectors/holesky_first_99_count_2_eth1_false/validator_keys/deposit_data-1715584120.json rename to validator_manager/test_vectors/vectors/holesky_first_99_count_2_eth1_false/validator_keys/deposit_data-1748939236.json index 7436b53f24..9dffddd89a 100644 --- a/validator_manager/test_vectors/vectors/holesky_first_99_count_2_eth1_false/validator_keys/deposit_data-1715584120.json +++ b/validator_manager/test_vectors/vectors/holesky_first_99_count_2_eth1_false/validator_keys/deposit_data-1748939236.json @@ -1 +1 @@ -[{"pubkey": "a57a4ed429e415b862cc758e75c93936e3f6339640d0763b969ba133a82c03717827fbdd8ec42fc862ed50e3b5b528dc", "withdrawal_credentials": "00864081ef2f5aec1aa667872615e25027f1fdc256a4948b6318cf75a8d635a3", "amount": 32000000000, "signature": 
"a59a2c510c5ce378b514f62550a7115cd6cfebaf73a5ba20c2cf21456a2d2c11d6e117b91d23743fc0361794cf7e5405030eb296926b526e8a2d68aa87569358e69d3884563a23770714730b6fab6ba639977d725a5ed4f29abe3ccc34575610", "deposit_message_root": "c08d0ecd085bc0f50c35f1b34d8b8937b2b9c8a172a9808de70f8d448c526f07", "deposit_data_root": "149a5dfbba87109dac65142cc067aed97c9579730488cfe16625be3ce4f753a6", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}, {"pubkey": "a2801622bc391724989004b5de78cb85746f85a303572691ecc945d9f5c61ec512127e58482e0dfcb4de77be3294ab01", "withdrawal_credentials": "00edff674c66a7f58285554e700183aeee5e740691de8087f7ce4d81f3597108", "amount": 32000000000, "signature": "966ae45b81402f1155ff313e48ca3a5346264dcc4bc9ee9e69994ee74368852d9d27c1684752735feba6c21042ad366b13f12c6e772c453518900435d87e2d743e1818e7471cf3574598e3b085c4527f643efe679841ddf8a480cac12b2c6e08", "deposit_message_root": "f5a530bee9698c2447961ecd210184fbb130bbb8e8916988d802d47e3b147842", "deposit_data_root": "f44dac412ae36929a84f64d5f7f91cada908a8f9e837fc70628f58804591798d", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "a57a4ed429e415b862cc758e75c93936e3f6339640d0763b969ba133a82c03717827fbdd8ec42fc862ed50e3b5b528dc", "withdrawal_credentials": "00864081ef2f5aec1aa667872615e25027f1fdc256a4948b6318cf75a8d635a3", "amount": 32000000000, "signature": "a59a2c510c5ce378b514f62550a7115cd6cfebaf73a5ba20c2cf21456a2d2c11d6e117b91d23743fc0361794cf7e5405030eb296926b526e8a2d68aa87569358e69d3884563a23770714730b6fab6ba639977d725a5ed4f29abe3ccc34575610", "deposit_message_root": "c08d0ecd085bc0f50c35f1b34d8b8937b2b9c8a172a9808de70f8d448c526f07", "deposit_data_root": "149a5dfbba87109dac65142cc067aed97c9579730488cfe16625be3ce4f753a6", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}, {"pubkey": 
"a2801622bc391724989004b5de78cb85746f85a303572691ecc945d9f5c61ec512127e58482e0dfcb4de77be3294ab01", "withdrawal_credentials": "00edff674c66a7f58285554e700183aeee5e740691de8087f7ce4d81f3597108", "amount": 32000000000, "signature": "966ae45b81402f1155ff313e48ca3a5346264dcc4bc9ee9e69994ee74368852d9d27c1684752735feba6c21042ad366b13f12c6e772c453518900435d87e2d743e1818e7471cf3574598e3b085c4527f643efe679841ddf8a480cac12b2c6e08", "deposit_message_root": "f5a530bee9698c2447961ecd210184fbb130bbb8e8916988d802d47e3b147842", "deposit_data_root": "f44dac412ae36929a84f64d5f7f91cada908a8f9e837fc70628f58804591798d", "fork_version": "01017000", "network_name": "holesky", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/mainnet_first_0_count_1_eth1_false/validator_keys/deposit_data-1715584089.json b/validator_manager/test_vectors/vectors/mainnet_first_0_count_1_eth1_false/validator_keys/deposit_data-1748939195.json similarity index 90% rename from validator_manager/test_vectors/vectors/mainnet_first_0_count_1_eth1_false/validator_keys/deposit_data-1715584089.json rename to validator_manager/test_vectors/vectors/mainnet_first_0_count_1_eth1_false/validator_keys/deposit_data-1748939195.json index d9ba926d1c..f8005651aa 100644 --- a/validator_manager/test_vectors/vectors/mainnet_first_0_count_1_eth1_false/validator_keys/deposit_data-1715584089.json +++ b/validator_manager/test_vectors/vectors/mainnet_first_0_count_1_eth1_false/validator_keys/deposit_data-1748939195.json @@ -1 +1 @@ -[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0049b6188ed20314309f617dd4030b8ddfac3c6e65759a03c226a13b2fe4cc72", "amount": 32000000000, "signature": "8ac88247c1b431a2d1eb2c5f00e7b8467bc21d6dc267f1af9ef727a12e32b4299e3b289ae5734a328b3202478dd746a80bf9e15a2217240dca1fc1b91a6b7ff7a0f5830d9a2610c1c30f19912346271357c21bd9af35a74097ebbdda2ddaf491", 
"deposit_message_root": "a9bc1d21cc009d9b10782a07213e37592c0d235463ed0117dec755758da90d51", "deposit_data_root": "807a20b2801eabfd9065c1b74ed6ae3e991a1ab770e4eaf268f30b37cfd2cbd7", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0049b6188ed20314309f617dd4030b8ddfac3c6e65759a03c226a13b2fe4cc72", "amount": 32000000000, "signature": "8ac88247c1b431a2d1eb2c5f00e7b8467bc21d6dc267f1af9ef727a12e32b4299e3b289ae5734a328b3202478dd746a80bf9e15a2217240dca1fc1b91a6b7ff7a0f5830d9a2610c1c30f19912346271357c21bd9af35a74097ebbdda2ddaf491", "deposit_message_root": "a9bc1d21cc009d9b10782a07213e37592c0d235463ed0117dec755758da90d51", "deposit_data_root": "807a20b2801eabfd9065c1b74ed6ae3e991a1ab770e4eaf268f30b37cfd2cbd7", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_false/validator_keys/deposit_data-1715584092.json b/validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_false/validator_keys/deposit_data-1748939200.json similarity index 90% rename from validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_false/validator_keys/deposit_data-1715584092.json rename to validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_false/validator_keys/deposit_data-1748939200.json index f1ea4c6ad3..a8b1a056c4 100644 --- a/validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_false/validator_keys/deposit_data-1715584092.json +++ b/validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_false/validator_keys/deposit_data-1748939200.json @@ -1 +1 @@ -[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": 
"0049b6188ed20314309f617dd4030b8ddfac3c6e65759a03c226a13b2fe4cc72", "amount": 32000000000, "signature": "8ac88247c1b431a2d1eb2c5f00e7b8467bc21d6dc267f1af9ef727a12e32b4299e3b289ae5734a328b3202478dd746a80bf9e15a2217240dca1fc1b91a6b7ff7a0f5830d9a2610c1c30f19912346271357c21bd9af35a74097ebbdda2ddaf491", "deposit_message_root": "a9bc1d21cc009d9b10782a07213e37592c0d235463ed0117dec755758da90d51", "deposit_data_root": "807a20b2801eabfd9065c1b74ed6ae3e991a1ab770e4eaf268f30b37cfd2cbd7", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}, {"pubkey": "a33ab9d93fb53c4f027944aaa11a13be0c150b7cc2e379d85d1ed4db38d178b4e4ebeae05832158b8c746c1961da00ce", "withdrawal_credentials": "00ad3748cbd1adc855c2bdab431f7e755a21663f4f6447ac888e5855c588af5a", "amount": 32000000000, "signature": "84b9fc8f260a1488c4c9a438f875edfa2bac964d651b2bc886d8442829b13f89752e807c8ca9bae9d50b1b506d3a64730015dd7f91e271ff9c1757d1996dcf6082fe5205cf6329fa2b6be303c21b66d75be608757a123da6ee4a4f14c01716d7", "deposit_message_root": "c5271aba974c802ff5b02b11fa33b545d7f430ff3b85c0f9eeef4cd59d83abf3", "deposit_data_root": "cd991ea8ff32e6b3940aed43b476c720fc1abd3040893b77a8a3efb306320d4c", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0049b6188ed20314309f617dd4030b8ddfac3c6e65759a03c226a13b2fe4cc72", "amount": 32000000000, "signature": "8ac88247c1b431a2d1eb2c5f00e7b8467bc21d6dc267f1af9ef727a12e32b4299e3b289ae5734a328b3202478dd746a80bf9e15a2217240dca1fc1b91a6b7ff7a0f5830d9a2610c1c30f19912346271357c21bd9af35a74097ebbdda2ddaf491", "deposit_message_root": "a9bc1d21cc009d9b10782a07213e37592c0d235463ed0117dec755758da90d51", "deposit_data_root": "807a20b2801eabfd9065c1b74ed6ae3e991a1ab770e4eaf268f30b37cfd2cbd7", "fork_version": "00000000", "network_name": "mainnet", 
"deposit_cli_version": "1.2.2"}, {"pubkey": "a33ab9d93fb53c4f027944aaa11a13be0c150b7cc2e379d85d1ed4db38d178b4e4ebeae05832158b8c746c1961da00ce", "withdrawal_credentials": "00ad3748cbd1adc855c2bdab431f7e755a21663f4f6447ac888e5855c588af5a", "amount": 32000000000, "signature": "84b9fc8f260a1488c4c9a438f875edfa2bac964d651b2bc886d8442829b13f89752e807c8ca9bae9d50b1b506d3a64730015dd7f91e271ff9c1757d1996dcf6082fe5205cf6329fa2b6be303c21b66d75be608757a123da6ee4a4f14c01716d7", "deposit_message_root": "c5271aba974c802ff5b02b11fa33b545d7f430ff3b85c0f9eeef4cd59d83abf3", "deposit_data_root": "cd991ea8ff32e6b3940aed43b476c720fc1abd3040893b77a8a3efb306320d4c", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_true/validator_keys/deposit_data-1715584107.json b/validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_true/validator_keys/deposit_data-1748939218.json similarity index 90% rename from validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_true/validator_keys/deposit_data-1715584107.json rename to validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_true/validator_keys/deposit_data-1748939218.json index 5741f23d8f..c3c25e9854 100644 --- a/validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_true/validator_keys/deposit_data-1715584107.json +++ b/validator_manager/test_vectors/vectors/mainnet_first_0_count_2_eth1_true/validator_keys/deposit_data-1748939218.json @@ -1 +1 @@ -[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0100000000000000000000000f51bb10119727a7e5ea3538074fb341f56b09ad", "amount": 32000000000, "signature": 
"a8461b58a5a5a0573c4af37da6ee4ba63e35894cffad6797d4a2c80f8f2c79d2c30c0de0299d8edde76e0c3f3e6d4f1e03cc377969f56d8760717d6e86f9316da9375573ce7bb87a8520daedb13c49284377f7a4f64a70aa2ca44b1581d47e20", "deposit_message_root": "62967565d11471da4af7769911926cd1826124048036b25616216f99bc320f13", "deposit_data_root": "d26d642a880ff8a109260fe69681840f6e1868c8c1cd2163a1db5a094e8db03a", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}, {"pubkey": "a33ab9d93fb53c4f027944aaa11a13be0c150b7cc2e379d85d1ed4db38d178b4e4ebeae05832158b8c746c1961da00ce", "withdrawal_credentials": "0100000000000000000000000f51bb10119727a7e5ea3538074fb341f56b09ad", "amount": 32000000000, "signature": "93a398c09143203beb94c9223c7e18f36e5ea36090875284b222c2fcb16982e6f2e26f27ca9d30e3c6f6b5ad44857fc50f531925f4736810712f68a9d7a9c0eb664a851180f3b7d2e44a35717d43b3d3e4fd555354fa1dfa92f451870f36084d", "deposit_message_root": "ce110433298ffb78d827d67dcc13655344a139cb7e3ce10b341937c0a76b25b7", "deposit_data_root": "7c7617a2c11870ec49e975b3691b9f822d63938df38555161e23aa245b150c66", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "88b6b3a9b391fa5593e8bce8d06102df1a56248368086929709fbb4a8570dc6a560febeef8159b19789e9c1fd13572f0", "withdrawal_credentials": "0100000000000000000000000f51bb10119727a7e5ea3538074fb341f56b09ad", "amount": 32000000000, "signature": "a8461b58a5a5a0573c4af37da6ee4ba63e35894cffad6797d4a2c80f8f2c79d2c30c0de0299d8edde76e0c3f3e6d4f1e03cc377969f56d8760717d6e86f9316da9375573ce7bb87a8520daedb13c49284377f7a4f64a70aa2ca44b1581d47e20", "deposit_message_root": "62967565d11471da4af7769911926cd1826124048036b25616216f99bc320f13", "deposit_data_root": "d26d642a880ff8a109260fe69681840f6e1868c8c1cd2163a1db5a094e8db03a", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}, {"pubkey": 
"a33ab9d93fb53c4f027944aaa11a13be0c150b7cc2e379d85d1ed4db38d178b4e4ebeae05832158b8c746c1961da00ce", "withdrawal_credentials": "0100000000000000000000000f51bb10119727a7e5ea3538074fb341f56b09ad", "amount": 32000000000, "signature": "93a398c09143203beb94c9223c7e18f36e5ea36090875284b222c2fcb16982e6f2e26f27ca9d30e3c6f6b5ad44857fc50f531925f4736810712f68a9d7a9c0eb664a851180f3b7d2e44a35717d43b3d3e4fd555354fa1dfa92f451870f36084d", "deposit_message_root": "ce110433298ffb78d827d67dcc13655344a139cb7e3ce10b341937c0a76b25b7", "deposit_data_root": "7c7617a2c11870ec49e975b3691b9f822d63938df38555161e23aa245b150c66", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/mainnet_first_1024_count_3_eth1_false/validator_keys/deposit_data-1715584103.json b/validator_manager/test_vectors/vectors/mainnet_first_1024_count_3_eth1_false/validator_keys/deposit_data-1748939214.json similarity index 87% rename from validator_manager/test_vectors/vectors/mainnet_first_1024_count_3_eth1_false/validator_keys/deposit_data-1715584103.json rename to validator_manager/test_vectors/vectors/mainnet_first_1024_count_3_eth1_false/validator_keys/deposit_data-1748939214.json index 9b9556cf9d..6bb47f5280 100644 --- a/validator_manager/test_vectors/vectors/mainnet_first_1024_count_3_eth1_false/validator_keys/deposit_data-1715584103.json +++ b/validator_manager/test_vectors/vectors/mainnet_first_1024_count_3_eth1_false/validator_keys/deposit_data-1748939214.json @@ -1 +1 @@ -[{"pubkey": "92ca8dddba4ae7ada6584c377fc53fb978ad9d5ee8db585b18e226c27682b326b3c68e10f5d99a453e233268c144e0ef", "withdrawal_credentials": "00dd4f8bfd1a48be288c2af8bb7315f6198900b5b3f56df010420d5328e682cb", "amount": 32000000000, "signature": 
"a0a96851892b257c032284928641021e58e0bcd277c3da5a2c41bcce6633d144781e4761261138277b5a8cf0ead59cce073e5a3bbc4704a37abf8cd1e290dc52e56cb0c334303945ebbb79be453c8177937e44e08f980679f1a2997fe58d2d86", "deposit_message_root": "5421d9177b4d035e6525506509ab702c5f458c53458dad437097b37cb8209b43", "deposit_data_root": "2bedaf48f8315d8631defc97c1c4c05a8152e2dc3fe779fc8e800dd67bd839a2", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}, {"pubkey": "86474cd2874663445ef0ee02aca81b2b942a383fd4c7085fa675388e26c67afc0fef44a8666d46f571723e349ae4a0cb", "withdrawal_credentials": "001c31aa161ed1d3c481c1ee8f3ad1853217296a15877917fe3c2f680580ac01", "amount": 32000000000, "signature": "b469179ad8ba9d6ad71b99a3c7ae662d9b77cca3ee53b20ab2eb20beee31874ad47224e94e75578fa6ecd30c1d40a0b300053817f934169d84425691edf13216445fbc6dd9b0953ad3af20c834fba63c1f50c0b0f92dd8bf383cd2cc8e0431f1", "deposit_message_root": "279271f7065c83868c37021c32c014516b21e6188fb2cee4e8543c5d38427698", "deposit_data_root": "69862477671957ab0b3f1167c5cd550c107132a0079eb70eaa4bc5c5fe06b5a0", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}, {"pubkey": "997e27aa262238beb01464434694a466321b5270297bdfdb944b65a3b6617b6ce2613628ac35a8f4cf2e9b4b55c46ef8", "withdrawal_credentials": "0097fffee9cf9fd91a6fa89af90e73f1cb8b8a043e742afaeb2e57b83b0845fe", "amount": 32000000000, "signature": "a8b05626657ce5b1801e0824aaeb21de2e1a11bc16cad6100ac911bcb873aaf7e7282f1f8465df4aaea998a1a4e1645f075e7e65f8c6b8688b0162f86be2128541f91fc9feb628bcab3b4afec1f7aeccaba04aaa54dc17c738233d360f94b97e", "deposit_message_root": "187e177721bfdd8ea13cb52c8de2dead29164a0e093efb640457a0e6ac918191", "deposit_data_root": "34ef32901d793cd9a0a3d93e7ee40e7be9abe6fb26f0b49a86b8ff29dc649930", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": 
"92ca8dddba4ae7ada6584c377fc53fb978ad9d5ee8db585b18e226c27682b326b3c68e10f5d99a453e233268c144e0ef", "withdrawal_credentials": "00dd4f8bfd1a48be288c2af8bb7315f6198900b5b3f56df010420d5328e682cb", "amount": 32000000000, "signature": "a0a96851892b257c032284928641021e58e0bcd277c3da5a2c41bcce6633d144781e4761261138277b5a8cf0ead59cce073e5a3bbc4704a37abf8cd1e290dc52e56cb0c334303945ebbb79be453c8177937e44e08f980679f1a2997fe58d2d86", "deposit_message_root": "5421d9177b4d035e6525506509ab702c5f458c53458dad437097b37cb8209b43", "deposit_data_root": "2bedaf48f8315d8631defc97c1c4c05a8152e2dc3fe779fc8e800dd67bd839a2", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}, {"pubkey": "86474cd2874663445ef0ee02aca81b2b942a383fd4c7085fa675388e26c67afc0fef44a8666d46f571723e349ae4a0cb", "withdrawal_credentials": "001c31aa161ed1d3c481c1ee8f3ad1853217296a15877917fe3c2f680580ac01", "amount": 32000000000, "signature": "b469179ad8ba9d6ad71b99a3c7ae662d9b77cca3ee53b20ab2eb20beee31874ad47224e94e75578fa6ecd30c1d40a0b300053817f934169d84425691edf13216445fbc6dd9b0953ad3af20c834fba63c1f50c0b0f92dd8bf383cd2cc8e0431f1", "deposit_message_root": "279271f7065c83868c37021c32c014516b21e6188fb2cee4e8543c5d38427698", "deposit_data_root": "69862477671957ab0b3f1167c5cd550c107132a0079eb70eaa4bc5c5fe06b5a0", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}, {"pubkey": "997e27aa262238beb01464434694a466321b5270297bdfdb944b65a3b6617b6ce2613628ac35a8f4cf2e9b4b55c46ef8", "withdrawal_credentials": "0097fffee9cf9fd91a6fa89af90e73f1cb8b8a043e742afaeb2e57b83b0845fe", "amount": 32000000000, "signature": "a8b05626657ce5b1801e0824aaeb21de2e1a11bc16cad6100ac911bcb873aaf7e7282f1f8465df4aaea998a1a4e1645f075e7e65f8c6b8688b0162f86be2128541f91fc9feb628bcab3b4afec1f7aeccaba04aaa54dc17c738233d360f94b97e", "deposit_message_root": "187e177721bfdd8ea13cb52c8de2dead29164a0e093efb640457a0e6ac918191", "deposit_data_root": 
"34ef32901d793cd9a0a3d93e7ee40e7be9abe6fb26f0b49a86b8ff29dc649930", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/mainnet_first_12_count_1_eth1_false/validator_keys/deposit_data-1715584095.json b/validator_manager/test_vectors/vectors/mainnet_first_12_count_1_eth1_false/validator_keys/deposit_data-1748939204.json similarity index 90% rename from validator_manager/test_vectors/vectors/mainnet_first_12_count_1_eth1_false/validator_keys/deposit_data-1715584095.json rename to validator_manager/test_vectors/vectors/mainnet_first_12_count_1_eth1_false/validator_keys/deposit_data-1748939204.json index 84140f53fe..ec53025149 100644 --- a/validator_manager/test_vectors/vectors/mainnet_first_12_count_1_eth1_false/validator_keys/deposit_data-1715584095.json +++ b/validator_manager/test_vectors/vectors/mainnet_first_12_count_1_eth1_false/validator_keys/deposit_data-1748939204.json @@ -1 +1 @@ -[{"pubkey": "8b181759a027c09a409ef24f6b35db213982c2474e2017f3851d76b1c4e560a4238072f67a0c22cb667f940da4ea9ec9", "withdrawal_credentials": "00cbec90e8570679f565bd4645f73a078981067a705564283e61c93c81707842", "amount": 32000000000, "signature": "a57299cde3c2ea8dc17ad3ce5a38a5f6de69d198599150dc4df02624ba1d8672440d02c0d27c3dc3b8c9f86c679571ab14c798426acd9b059895f1f5887bdee805fb4e31bd8f93ec9e78403c23d7924f23eae6af056154f35fee03bf9ffe0e98", "deposit_message_root": "fcdf3d94740766299a95b3e477e64abadff6ab8978400578f241c93eb367b938", "deposit_data_root": "246619823b45d80f53a30404542ec4be447d4e268cc0afcdf480e6a846d58411", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "8b181759a027c09a409ef24f6b35db213982c2474e2017f3851d76b1c4e560a4238072f67a0c22cb667f940da4ea9ec9", "withdrawal_credentials": "00cbec90e8570679f565bd4645f73a078981067a705564283e61c93c81707842", "amount": 32000000000, "signature": 
"a57299cde3c2ea8dc17ad3ce5a38a5f6de69d198599150dc4df02624ba1d8672440d02c0d27c3dc3b8c9f86c679571ab14c798426acd9b059895f1f5887bdee805fb4e31bd8f93ec9e78403c23d7924f23eae6af056154f35fee03bf9ffe0e98", "deposit_message_root": "fcdf3d94740766299a95b3e477e64abadff6ab8978400578f241c93eb367b938", "deposit_data_root": "246619823b45d80f53a30404542ec4be447d4e268cc0afcdf480e6a846d58411", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}] \ No newline at end of file diff --git a/validator_manager/test_vectors/vectors/mainnet_first_99_count_2_eth1_false/validator_keys/deposit_data-1715584098.json b/validator_manager/test_vectors/vectors/mainnet_first_99_count_2_eth1_false/validator_keys/deposit_data-1748939209.json similarity index 90% rename from validator_manager/test_vectors/vectors/mainnet_first_99_count_2_eth1_false/validator_keys/deposit_data-1715584098.json rename to validator_manager/test_vectors/vectors/mainnet_first_99_count_2_eth1_false/validator_keys/deposit_data-1748939209.json index 3205390a43..7374811091 100644 --- a/validator_manager/test_vectors/vectors/mainnet_first_99_count_2_eth1_false/validator_keys/deposit_data-1715584098.json +++ b/validator_manager/test_vectors/vectors/mainnet_first_99_count_2_eth1_false/validator_keys/deposit_data-1748939209.json @@ -1 +1 @@ -[{"pubkey": "a57a4ed429e415b862cc758e75c93936e3f6339640d0763b969ba133a82c03717827fbdd8ec42fc862ed50e3b5b528dc", "withdrawal_credentials": "00864081ef2f5aec1aa667872615e25027f1fdc256a4948b6318cf75a8d635a3", "amount": 32000000000, "signature": "8ca8a6f30b4346d7b9912e3dcd820652bc472511f89d91fd102acfb0c8df1cfc7a2629f44170727e126e88f2847fe5c9081b13fb0838a2b2343a95cabf16f57708fc0cf846bc5307209ae976c34500cc826ff48ab64169d8bebec99dded5dd1d", "deposit_message_root": "c08d0ecd085bc0f50c35f1b34d8b8937b2b9c8a172a9808de70f8d448c526f07", "deposit_data_root": "c0c6cd40b43ea0fe7fcc284de9acd9c1bd001bb88c059c155393af22a6c85d46", "fork_version": "00000000", "network_name": "mainnet", 
"deposit_cli_version": "2.7.0"}, {"pubkey": "a2801622bc391724989004b5de78cb85746f85a303572691ecc945d9f5c61ec512127e58482e0dfcb4de77be3294ab01", "withdrawal_credentials": "00edff674c66a7f58285554e700183aeee5e740691de8087f7ce4d81f3597108", "amount": 32000000000, "signature": "8c0784645c611b4f514a6519b737f2d02df3eba0e04cd30efebffcca769af8cc599ce28e4421cefe665ec31d3c34e44c174e0cca4891d8196796085e712459b45e411efecd07cf3258f1d6309a07a6dd52a0ae186e6184d37bf11cee36ec84e8", "deposit_message_root": "f5a530bee9698c2447961ecd210184fbb130bbb8e8916988d802d47e3b147842", "deposit_data_root": "c57790b77ef97318d4ec7b97ea07ea458d08209ba372bfe76171e2ece22d6130", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "2.7.0"}] \ No newline at end of file +[{"pubkey": "a57a4ed429e415b862cc758e75c93936e3f6339640d0763b969ba133a82c03717827fbdd8ec42fc862ed50e3b5b528dc", "withdrawal_credentials": "00864081ef2f5aec1aa667872615e25027f1fdc256a4948b6318cf75a8d635a3", "amount": 32000000000, "signature": "8ca8a6f30b4346d7b9912e3dcd820652bc472511f89d91fd102acfb0c8df1cfc7a2629f44170727e126e88f2847fe5c9081b13fb0838a2b2343a95cabf16f57708fc0cf846bc5307209ae976c34500cc826ff48ab64169d8bebec99dded5dd1d", "deposit_message_root": "c08d0ecd085bc0f50c35f1b34d8b8937b2b9c8a172a9808de70f8d448c526f07", "deposit_data_root": "c0c6cd40b43ea0fe7fcc284de9acd9c1bd001bb88c059c155393af22a6c85d46", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}, {"pubkey": "a2801622bc391724989004b5de78cb85746f85a303572691ecc945d9f5c61ec512127e58482e0dfcb4de77be3294ab01", "withdrawal_credentials": "00edff674c66a7f58285554e700183aeee5e740691de8087f7ce4d81f3597108", "amount": 32000000000, "signature": "8c0784645c611b4f514a6519b737f2d02df3eba0e04cd30efebffcca769af8cc599ce28e4421cefe665ec31d3c34e44c174e0cca4891d8196796085e712459b45e411efecd07cf3258f1d6309a07a6dd52a0ae186e6184d37bf11cee36ec84e8", "deposit_message_root": 
"f5a530bee9698c2447961ecd210184fbb130bbb8e8916988d802d47e3b147842", "deposit_data_root": "c57790b77ef97318d4ec7b97ea07ea458d08209ba372bfe76171e2ece22d6130", "fork_version": "00000000", "network_name": "mainnet", "deposit_cli_version": "1.2.2"}] \ No newline at end of file