Mirror of https://github.com/sigp/lighthouse.git, synced 2026-03-09 03:31:45 +00:00
* Remove use of ethers_core::RlpStream
* Merge branch 'unstable' of https://github.com/sigp/lighthouse into remove_use_of_ethers_core
* Remove old code
* Simplify keccak call
* Remove unused package
* Merge branch 'unstable' of https://github.com/ethDreamer/lighthouse into remove_use_of_ethers_core
* Merge branch 'unstable' into remove_use_of_ethers_core
* Run clippy
* Merge branch 'remove_use_of_ethers_core' of https://github.com/dospore/lighthouse into remove_use_of_ethers_core
* Check all cargo fmt
* migrate to alloy primitives init
* fix deps
* integrate alloy-primitives
* resolve dep issues
* more changes based on dep changes
* add TODOs
* Merge branch 'unstable' of https://github.com/sigp/lighthouse into remove_use_of_ethers_core
* Revert lock
* Add BeaconBlocksByRange v3
* continue migration
* Revert "Add BeaconBlocksByRange v3"
This reverts commit e3ce7fc5ea.
* impl hash256 extended trait
* revert some unneeded diffs
* merge conflict resolved
* fix subnet id rshift calc
* rename to FixedBytesExtended (see the trait sketch after this list)
* debugging
* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives
* fix failed test
* fixing more tests
* Merge branch 'unstable' of https://github.com/sigp/lighthouse into remove_use_of_ethers_core
* introduce a shim to convert between the two U256 types (see the sketch after this list)
* move alloy to workspace
* align alloy versions
* update
* update web3signer test certs
* refactor
* resolve failing tests
* linting
* fix graffiti string test
* fmt
* fix ef test
* resolve merge conflicts
* remove udep and revert cert
* cargo patch
* cyclic dep
* fix build error
* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives
* resolve conflicts, update deps
* merge unstable
* fmt
* fix deps
* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives
* resolve merge conflicts
* resolve conflicts, make necessary changes
* Remove patch
* fmt
* remove file
* merge conflicts
* sneaking in a smol change
* bump versions
* Merge remote-tracking branch 'origin/unstable' into migrate-to-alloy-primitives
* Updates for peerDAS
* Update ethereum_hashing to prevent dupe
* updated alloy-consensus, removed TODOs
* cargo update
* endianness fix
* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives
* fmt
* fix merge
* fix test
* fixed_bytes crate
* minor fixes
* convert u256 to i64
* panic-free mixin to_low_u64_le
* from_str_radix
* compute_subnet api and ensuring we use big-endian (see the sketch after this list)
* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives
* fix test
* Simplify subnet_id test
* Simplify some more tests
* Add tests to fixed_bytes crate
* Merge branch 'unstable' into migrate-to-alloy-primitives
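The FixedBytesExtended trait named in the commits above restores ethereum-types-style helpers that alloy's FixedBytes does not provide; the tests further down depend on it for Hash256::zero(), and the "panic-free mixin to_low_u64_le" commit covers the read-back direction. A minimal sketch, assuming Hash256 is an alias for alloy_primitives::FixedBytes<32>; the method set shown here is illustrative, not the crate's exact API:

use alloy_primitives::FixedBytes;

// Illustrative method set; the actual trait in Lighthouse may differ.
pub trait FixedBytesExtended {
    fn zero() -> Self;
    fn to_low_u64_le(&self) -> u64;
}

impl<const N: usize> FixedBytesExtended for FixedBytes<N> {
    fn zero() -> Self {
        FixedBytes::<N>::ZERO
    }

    // Panic-free: copies min(N, 8) bytes instead of slicing a fixed range,
    // so it cannot index out of bounds when N < 8. Interprets the leading
    // bytes as a little-endian u64.
    fn to_low_u64_le(&self) -> u64 {
        let mut buf = [0u8; 8];
        let n = core::cmp::min(N, 8);
        buf[..n].copy_from_slice(&self.as_slice()[..n]);
        u64::from_le_bytes(buf)
    }
}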
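The U256 shim is similar in spirit: while the migration is in flight, primitive_types::U256 (via ethereum-types) and alloy_primitives::U256 coexist, so conversion helpers bridge the two. A sketch with hypothetical helper names (the real shim may expose From/Into impls instead); it leans on the fact that both types store four little-endian u64 limbs:

use alloy_primitives::U256 as AlloyU256;
use primitive_types::U256 as PrimitiveU256;

pub fn primitive_to_alloy(x: PrimitiveU256) -> AlloyU256 {
    // primitive_types::U256 is a tuple struct over [u64; 4] limbs in
    // little-endian limb order, matching alloy's (ruint's) layout.
    AlloyU256::from_limbs(x.0)
}

pub fn alloy_to_primitive(x: AlloyU256) -> PrimitiveU256 {
    PrimitiveU256(*x.as_limbs())
}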
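The subnet id fixes above ("fix subnet id rshift calc", "ensuring we use big-endian") come down to byte order: alloy's U256 hands back bytes in whichever endianness you ask for, and the subnet prefix computation needs the big-endian, most-significant-first view. An illustrative sketch, not Lighthouse's actual compute_subnets code:

use alloy_primitives::U256;

// Take the prefix_bits most-significant bits of a node id (prefix_bits <= 64).
fn node_id_prefix(node_id: U256, prefix_bits: usize) -> u64 {
    // Right-shift so the prefix occupies the low-order bits.
    let shifted = node_id >> (256 - prefix_bits);
    // Read the canonical big-endian encoding; the low 64 bits are the last
    // 8 bytes. Using to_le_bytes here would reverse the prefix.
    let be = shifted.to_be_bytes::<32>();
    u64::from_be_bytes(be[24..32].try_into().unwrap())
}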
77 lines
2.4 KiB
Rust
#![cfg(test)]

use crate::test_utils::pubkey;
use crate::*;
use tempfile::tempdir;
use types::FixedBytesExtended;

#[test]
fn export_non_existent_key() {
    let dir = tempdir().unwrap();
    let slashing_db_file = dir.path().join("slashing_protection.sqlite");
    let slashing_db = SlashingDatabase::create(&slashing_db_file).unwrap();

    let key1 = pubkey(1);
    let key2 = pubkey(2);

    // Exporting two non-existent keys should fail on the first one.
    let err = slashing_db
        .export_interchange_info(Hash256::zero(), Some(&[key1, key2]))
        .unwrap_err();
    assert!(matches!(
        err,
        InterchangeError::NotSafe(NotSafe::UnregisteredValidator(k)) if k == key1
    ));

    slashing_db.register_validator(key1).unwrap();

    // Exporting one key that exists and one that doesn't should fail on the one that doesn't.
    let err = slashing_db
        .export_interchange_info(Hash256::zero(), Some(&[key1, key2]))
        .unwrap_err();
    assert!(matches!(
        err,
        InterchangeError::NotSafe(NotSafe::UnregisteredValidator(k)) if k == key2
    ));

    // Exporting only keys that exist should work.
    let interchange = slashing_db
        .export_interchange_info(Hash256::zero(), Some(&[key1]))
        .unwrap();
    assert_eq!(interchange.data.len(), 1);
    assert_eq!(interchange.data[0].pubkey, key1);
}

#[test]
fn export_same_key_twice() {
    let dir = tempdir().unwrap();
    let slashing_db_file = dir.path().join("slashing_protection.sqlite");
    let slashing_db = SlashingDatabase::create(&slashing_db_file).unwrap();

    let key1 = pubkey(1);

    slashing_db.register_validator(key1).unwrap();

    let export_single = slashing_db
        .export_interchange_info(Hash256::zero(), Some(&[key1]))
        .unwrap();
    let export_double = slashing_db
        .export_interchange_info(Hash256::zero(), Some(&[key1, key1]))
        .unwrap();

    assert_eq!(export_single.data.len(), 1);

    // Allow the same data to be exported twice, this is harmless, albeit slightly inefficient.
    assert_eq!(export_double.data.len(), 2);
    assert_eq!(export_double.data[0], export_double.data[1]);

    // The data should be identical to the single export.
    assert_eq!(export_double.data[0], export_single.data[0]);

    // The minified versions should be equal too.
    assert_eq!(
        export_single.minify().unwrap(),
        export_double.minify().unwrap()
    );
}