Files
lighthouse/validator_client/slashing_protection/tests/migration.rs
Eitan Seri-Levi 99e53b88c3 Migrate from ethereum-types to alloy-primitives (#6078)
* Remove use of ethers_core::RlpStream

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into remove_use_of_ethers_core

* Remove old code

* Simplify keccak call

* Remove unused package

* Merge branch 'unstable' of https://github.com/ethDreamer/lighthouse into remove_use_of_ethers_core

* Merge branch 'unstable' into remove_use_of_ethers_core

* Run clippy

* Merge branch 'remove_use_of_ethers_core' of https://github.com/dospore/lighthouse into remove_use_of_ethers_core

* Check all cargo fmt

* migrate to alloy primitives init

* fix deps

* integrate alloy-primitives

* resolve dep issues

* more changes based on dep changes

* add TODOs

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into remove_use_of_ethers_core

* Revert lock

* Add BeaconBlocksByRange v3

* continue migration

* Revert "Add BeaconBlocksByRange v3"

This reverts commit e3ce7fc5ea.

* impl hash256 extended trait

* revert some unneeded diffs

* merge conflict resolved

* fix subnet id rshift calc

* rename to FixedBytesExtended

* debugging

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* fix failed test

* fixing more tests

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into remove_use_of_ethers_core

* introduce a shim to convert between the two u256 types

* move alloy to workspace

* align alloy versions

* update

* update web3signer test certs

* refactor

* resolve failing tests

* linting

* fix graffiti string test

* fmt

* fix ef test

* resolve merge conflicts

* remove udep and revert cert

* cargo patch

* cyclic dep

* fix build error

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* resolve conflicts, update deps

* merge unstable

* fmt

* fix deps

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* resolve merge conflicts

* resolve conflicts, make necessary changes

* Remove patch

* fmt

* remove file

* merge conflicts

* sneaking in a smol change

* bump versions

* Merge remote-tracking branch 'origin/unstable' into migrate-to-alloy-primitives

* Updates for peerDAS

* Update ethereum_hashing to prevent dupe

* updated alloy-consensus, removed TODOs

* cargo update

* endianness fix

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* fmt

* fix merge

* fix test

* fixed_bytes crate

* minor fixes

* convert u256 to i64

* panic free mixin to_low_u64_le

* from_str_radix

* compute_subnet api and ensuring we use big-endian

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* fix test

* Simplify subnet_id test

* Simplify some more tests

* Add tests to fixed_bytes crate

* Merge branch 'unstable' into migrate-to-alloy-primitives
2024-09-02 08:03:24 +00:00

69 lines
2.4 KiB
Rust

//! Tests for upgrading a previous version of the database to the latest schema.
use slashing_protection::{NotSafe, SlashingDatabase};
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use tempfile::tempdir;
use types::{FixedBytesExtended, Hash256};
/// Directory holding the pre-migration database fixtures, relative to this
/// crate's manifest directory (panics if `CARGO_MANIFEST_DIR` is unset,
/// which cargo always sets when running tests).
fn test_data_dir() -> PathBuf {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    PathBuf::from(manifest_dir).join("migration-tests")
}
/// Copy `filename` from the test data dir to the temporary `dest` for testing.
///
/// Returns the path of the copy inside `dest`. Panics if the fixture file
/// does not exist or cannot be copied.
fn make_copy(filename: &str, dest: &Path) -> PathBuf {
    let destination = dest.join(filename);
    fs::copy(test_data_dir().join(filename), &destination).unwrap();
    destination
}
#[test]
fn add_enabled_column() {
    let tmp = tempdir().unwrap();
    let path = make_copy("v0_no_enabled_column.sqlite", tmp.path());
    let num_expected_validators = 5;

    // Opening the database runs the schema migrations. The v0 input file has
    // no `enabled` column, so a successful open implies the column was added.
    let db = SlashingDatabase::open(&path).unwrap();

    // Exporting an interchange file should list every validator.
    let interchange = db.export_all_interchange_info(Hash256::zero()).unwrap();
    assert_eq!(interchange.data.len(), num_expected_validators);

    db.with_transaction(|txn| {
        // Every migrated validator should be enabled, and their IDs unique.
        let mut id_to_pubkey = HashMap::new();
        for record in &interchange.data {
            let (validator_id, enabled) = db
                .get_validator_id_with_status(txn, &record.pubkey)
                .unwrap()
                .unwrap();
            assert!(enabled);
            id_to_pubkey.insert(validator_id, record.pubkey);
        }
        assert_eq!(id_to_pubkey.len(), num_expected_validators);

        // Each validator can then be disabled individually.
        for (&validator_id, pubkey) in &id_to_pubkey {
            db.update_validator_status(txn, validator_id, false)
                .unwrap();
            let (loaded_id, enabled) = db
                .get_validator_id_with_status(txn, pubkey)
                .unwrap()
                .unwrap();
            assert_eq!(validator_id, loaded_id);
            assert!(!enabled);
        }
        Ok::<_, NotSafe>(())
    })
    .unwrap();
}