diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5b28a5ec71..9ce1991ac5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -212,11 +212,9 @@ jobs: ## Testing Checklist (DELETE ME) - - - [ ] Run on synced Pyrmont Sigma Prime nodes. + - [ ] Run on synced Prater Sigma Prime nodes. - [ ] Run on synced Canary (mainnet) Sigma Prime nodes. - - [ ] Resync a Pyrmont node. - [ ] Resync a Prater node. - [ ] Resync a mainnet node. @@ -232,6 +230,19 @@ jobs: Add a summary. + ## Update Priority + + This table provides priorities for which classes of users should update particular components. + + |User Class |Beacon Node | Validator Client| + --- | --- | --- + |Staking Users| | | + |Non-Staking Users| |---| + + *See [Update + Priorities](https://lighthouse-book.sigmaprime.io/installation-priorities.html) + more information about this table.* + ## All Changes ${{ steps.changelog.outputs.CHANGELOG }} diff --git a/Cargo.lock b/Cargo.lock index 06b1025bd9..38f9547c74 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -223,7 +223,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b88d82667eca772c4aa12f0f1348b3ae643424c8876448f3f7bd5787032e234c" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", ] [[package]] @@ -250,15 +250,18 @@ dependencies = [ [[package]] name = "autocfg" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" +checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" +dependencies = [ + "autocfg 1.1.0", +] [[package]] name = "autocfg" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = 
"backtrace" @@ -275,12 +278,6 @@ dependencies = [ "rustc-demangle", ] -[[package]] -name = "base16ct" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" - [[package]] name = "base64" version = "0.12.3" @@ -346,7 +343,7 @@ dependencies = [ [[package]] name = "beacon_node" -version = "2.1.2" +version = "2.1.3" dependencies = [ "beacon_chain", "clap", @@ -445,13 +442,11 @@ dependencies = [ [[package]] name = "blake2" -version = "0.9.2" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" +checksum = "b94ba84325db59637ffc528bbe8c7f86c02c57cff5c0e2b9b00f9a851f42f309" dependencies = [ - "crypto-mac 0.8.0", - "digest 0.9.0", - "opaque-debug", + "digest 0.10.1", ] [[package]] @@ -466,9 +461,9 @@ dependencies = [ [[package]] name = "block-buffer" -version = "0.10.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1d36a02058e76b040de25a4464ba1c80935655595b661505c8b39b664828b95" +checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" dependencies = [ "generic-array", ] @@ -512,7 +507,7 @@ dependencies = [ [[package]] name = "boot_node" -version = "2.1.2" +version = "2.1.3" dependencies = [ "beacon_node", "clap", @@ -661,21 +656,21 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chacha20" -version = "0.7.1" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fee7ad89dc1128635074c268ee661f90c3f7e83d9fd12910608c36b47d6c3412" +checksum = "01b72a433d0cf2aef113ba70f62634c56fddb0f244e6377185c56a7cadbd8f91" dependencies = [ "cfg-if", "cipher", - "cpufeatures 0.1.5", + "cpufeatures 0.2.1", "zeroize", ] [[package]] name = "chacha20poly1305" -version = "0.8.0" +version = "0.9.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1580317203210c517b6d44794abfbe600698276db18127e37ad3e69bf5e848e5" +checksum = "3b84ed6d1d5f7aa9bdde921a5090e0ca4d934d250ea3b402a5fab3a994e28a2a" dependencies = [ "aead", "chacha20", @@ -708,9 +703,9 @@ dependencies = [ [[package]] name = "clang-sys" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa66045b9cb23c2e9c1520732030608b02ee07e5cfaa5a521ec15ded7fa24c90" +checksum = "4cc00842eed744b858222c4c9faf7243aafc6d33f92f96935263ef4d8a41ce21" dependencies = [ "glob", "libc", @@ -822,12 +817,6 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "279bc8fc53f788a75c7804af68237d1fce02cde1e275a886a4b320604dc2aeda" -[[package]] -name = "const-oid" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4c78c047431fee22c1a7bb92e00ad095a02a983affe4d8a72e2a2c62c1b94f3" - [[package]] name = "convert_case" version = "0.4.0" @@ -836,9 +825,9 @@ checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" [[package]] name = "core-foundation" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" dependencies = [ "core-foundation-sys", "libc", @@ -850,6 +839,15 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + [[package]] name = "cpufeatures" version = "0.1.5" @@ -870,9 +868,9 @@ dependencies = [ [[package]] 
name = "crc32fast" -version = "1.3.1" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2209c310e29876f7f0b2721e7e26b84aff178aa3da5d091f9bfbf47669e60e3" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ "cfg-if", ] @@ -936,9 +934,9 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97242a70df9b89a65d0b6df3c4bf5b9ce03c5b7309019777fbde37e7537f8762" +checksum = "c00d6d2ea26e8b151d99093005cb442fb9a37aeaca582a03ec70946f49ab5ed9" dependencies = [ "cfg-if", "crossbeam-utils", @@ -949,9 +947,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcae03edb34f947e64acdb1c33ec169824e20657e9ecb61cef6c8c74dcb8120" +checksum = "b5e5bed1f1c269533fa816a0a5492b3545209a205ca1a54842be180eb63a16a6" dependencies = [ "cfg-if", "lazy_static", @@ -963,18 +961,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" -[[package]] -name = "crypto-bigint" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03c6a1d5fa1de37e071642dfa44ec552ca5b299adb128fab16138e24b548fd21" -dependencies = [ - "generic-array", - "rand_core 0.6.3", - "subtle", - "zeroize", -] - [[package]] name = "crypto-common" version = "0.1.1" @@ -1058,6 +1044,19 @@ dependencies = [ "zeroize", ] +[[package]] +name = "curve25519-dalek" +version = "4.0.0-pre.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4033478fbf70d6acf2655ac70da91ee65852d69daf7a67bf7a2f518fb47aafcf" +dependencies = [ + "byteorder", + "digest 0.9.0", + "rand_core 0.6.3", + "subtle", + "zeroize", +] + [[package]] name = "darling" version = 
"0.13.1" @@ -1140,19 +1139,10 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2eeb9d92785d1facb50567852ce75d0858630630e7eabea59cf7eb7474051087" dependencies = [ - "const-oid 0.5.2", + "const-oid", "typenum", ] -[[package]] -name = "der" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6919815d73839e7ad218de758883aae3a257ba6759ce7a9992501efbb53d705c" -dependencies = [ - "const-oid 0.7.1", -] - [[package]] name = "derivative" version = "2.2.0" @@ -1203,9 +1193,10 @@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b697d66081d42af4fba142d56918a3cb21dc8eb63372c6b85d14f44fb9c5979b" dependencies = [ - "block-buffer 0.10.0", + "block-buffer 0.10.2", "crypto-common", "generic-array", + "subtle", ] [[package]] @@ -1275,7 +1266,7 @@ dependencies = [ "hex", "hkdf", "lazy_static", - "libp2p-core 0.30.0", + "libp2p-core 0.30.2", "lru", "parking_lot", "rand 0.8.4", @@ -1287,15 +1278,15 @@ dependencies = [ "tokio-util", "tracing", "tracing-subscriber", - "uint 0.9.2", + "uint 0.9.3", "zeroize", ] [[package]] name = "dtoa" -version = "0.4.8" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0" +checksum = "5caaa75cbd2b960ff1e5392d2cfb1f44717fffe12fc1f32b7b5d1267f99732a6" [[package]] name = "ecdsa" @@ -1303,24 +1294,12 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34d33b390ab82f2e1481e331dbd0530895640179d2128ef9a79cc690b78d1eba" dependencies = [ - "der 0.3.5", - "elliptic-curve 0.9.12", + "der", + "elliptic-curve", "hmac 0.11.0", "signature", ] -[[package]] -name = "ecdsa" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0d69ae62e0ce582d56380743515fefaf1a8c70cec685d9677636d7e30ae9dc9" -dependencies = [ - "der 0.5.1", - 
"elliptic-curve 0.11.12", - "rfc6979", - "signature", -] - [[package]] name = "ed25519" version = "1.3.0" @@ -1336,7 +1315,7 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ - "curve25519-dalek", + "curve25519-dalek 3.2.0", "ed25519", "rand 0.7.3", "serde", @@ -1388,33 +1367,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c13e9b0c3c4170dcc2a12783746c4205d98e18957f57854251eea3f9750fe005" dependencies = [ "bitvec 0.20.4", - "ff 0.9.0", + "ff", "generic-array", - "group 0.9.0", + "group", "pkcs8", "rand_core 0.6.3", "subtle", "zeroize", ] -[[package]] -name = "elliptic-curve" -version = "0.11.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25b477563c2bfed38a3b7a60964c49e058b2510ad3f12ba3483fd8f62c2306d6" -dependencies = [ - "base16ct", - "crypto-bigint", - "der 0.5.1", - "ff 0.11.0", - "generic-array", - "group 0.11.0", - "rand_core 0.6.3", - "sec1", - "subtle", - "zeroize", -] - [[package]] name = "encoding_rs" version = "0.8.30" @@ -1782,7 +1743,7 @@ dependencies = [ "serde_json", "sha3", "thiserror", - "uint 0.9.2", + "uint 0.9.3", ] [[package]] @@ -1836,7 +1797,7 @@ dependencies = [ "impl-rlp 0.3.0", "impl-serde", "primitive-types 0.9.1", - "uint 0.9.2", + "uint 0.9.3", ] [[package]] @@ -1850,7 +1811,7 @@ dependencies = [ "impl-rlp 0.3.0", "impl-serde", "primitive-types 0.10.1", - "uint 0.9.2", + "uint 0.9.3", ] [[package]] @@ -1930,16 +1891,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "ff" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2958d04124b9f27f175eaeb9a9f383d026098aa837eadd8ba22c11f13a05b9e" -dependencies = [ - "rand_core 0.6.3", - "subtle", -] - [[package]] name = "ffi-opaque" version = "2.0.1" @@ -2074,9 +2025,9 @@ checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" 
[[package]] name = "futures" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28560757fe2bb34e79f907794bb6b22ae8b0e5c669b638a1132f2592b19035b4" +checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" dependencies = [ "futures-channel", "futures-core", @@ -2089,9 +2040,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" +checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" dependencies = [ "futures-core", "futures-sink", @@ -2099,15 +2050,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" [[package]] name = "futures-executor" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29d6d2ff5bb10fb95c85b8ce46538a2e5f5e7fdc755623a7d4529ab8a4ed9d2a" +checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" dependencies = [ "futures-core", "futures-task", @@ -2117,15 +2068,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2" +checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" [[package]] name = "futures-macro" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c" +checksum = 
"33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" dependencies = [ "proc-macro2", "quote", @@ -2145,15 +2096,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" +checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" [[package]] name = "futures-task" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" +checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" [[package]] name = "futures-timer" @@ -2163,9 +2114,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" +checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" dependencies = [ "futures-channel", "futures-core", @@ -2284,18 +2235,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61b3c1e8b4f1ca07e6605ea1be903a5f6956aec5c8a67fd44d56076631675ed8" dependencies = [ - "ff 0.9.0", - "rand_core 0.6.3", - "subtle", -] - -[[package]] -name = "group" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc5ac374b108929de78460075f3dc439fa66df9d8fc77e8f12caa5165fcf0c89" -dependencies = [ - "ff 0.11.0", + "ff", "rand_core 0.6.3", "subtle", ] @@ -2535,9 +2475,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" 
+checksum = "9100414882e15fb7feccb4897e5f0ff0ff1ca7d1a86a23208ada4d7a18e6c6c4" [[package]] name = "httpdate" @@ -2696,9 +2636,9 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5dacb10c5b3bb92d46ba347505a9041e676bb20ad220101326bffb0c93031ee" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ "proc-macro2", "quote", @@ -2711,7 +2651,7 @@ version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", "hashbrown", ] @@ -2842,8 +2782,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3e8e491ed22bc161583a1c77e42313672c483eba6bd9d7afec0f1131d0b9ce" dependencies = [ "cfg-if", - "ecdsa 0.11.1", - "elliptic-curve 0.9.12", + "ecdsa", + "elliptic-curve", "sha2 0.9.9", ] @@ -2870,7 +2810,7 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "lcli" -version = "2.1.2" +version = "2.1.3" dependencies = [ "account_utils", "bls", @@ -2927,9 +2867,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.116" +version = "0.2.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "565dbd88872dbe4cc8a46e527f26483c1d1f7afa6b884a3bd6cd893d4f98da74" +checksum = "e74d72e0f9b65b5b4ca49a346af3976df0f9c61d550727f349ecd559f251a26c" [[package]] name = "libflate" @@ -2963,9 +2903,9 @@ dependencies = [ [[package]] name = "libm" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7d73b3f436185384286bd8098d17ec07c9a7d2388a6599f824d8502b529702a" +checksum = "33a33a362ce288760ec6a508b94caaec573ae7d3bbbd91b87aa0bad4456839db" [[package]] name = "libmdbx" @@ -2985,8 +2925,8 @@ dependencies = [ 
[[package]] name = "libp2p" -version = "0.42.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.43.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "atomic", "bytes", @@ -2995,7 +2935,7 @@ dependencies = [ "getrandom 0.2.4", "instant", "lazy_static", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "libp2p-dns", "libp2p-gossipsub", "libp2p-identify", @@ -3008,7 +2948,7 @@ dependencies = [ "libp2p-tcp", "libp2p-websocket", "libp2p-yamux", - "multiaddr", + "multiaddr 0.14.0", "parking_lot", "pin-project 1.0.10", "rand 0.7.3", @@ -3017,9 +2957,9 @@ dependencies = [ [[package]] name = "libp2p-core" -version = "0.30.0" +version = "0.30.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef22d9bba1e8bcb7ec300073e6802943fe8abb8190431842262b5f1c30abba1" +checksum = "86aad7d54df283db817becded03e611137698a6509d4237a96881976a162340c" dependencies = [ "asn1_der", "bs58", @@ -3028,11 +2968,12 @@ dependencies = [ "fnv", "futures", "futures-timer", + "instant", "lazy_static", "libsecp256k1 0.7.0", "log", - "multiaddr", - "multihash", + "multiaddr 0.13.0", + "multihash 0.14.0", "multistream-select 0.10.4", "parking_lot", "pin-project 1.0.10", @@ -3051,8 +2992,8 @@ dependencies = [ [[package]] name = "libp2p-core" -version = "0.31.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.32.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "asn1_der", "bs58", @@ -3065,10 +3006,9 @@ dependencies = [ "lazy_static", "libsecp256k1 0.7.0", "log", - "multiaddr", - "multihash", + "multiaddr 0.14.0", + "multihash 0.16.1", "multistream-select 
0.11.0", - "p256", "parking_lot", "pin-project 1.0.10", "prost", @@ -3086,11 +3026,11 @@ dependencies = [ [[package]] name = "libp2p-dns" -version = "0.31.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.32.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "futures", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "log", "smallvec", "trust-dns-resolver", @@ -3098,8 +3038,8 @@ dependencies = [ [[package]] name = "libp2p-gossipsub" -version = "0.35.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.36.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "asynchronous-codec", "base64 0.13.0", @@ -3107,31 +3047,30 @@ dependencies = [ "bytes", "fnv", "futures", - "futures-timer", "hex_fmt", "instant", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "libp2p-swarm", "log", - "open-metrics-client", - "pin-project 1.0.10", + "prometheus-client", "prost", "prost-build", "rand 0.7.3", "regex", "sha2 0.10.1", "smallvec", + "tokio", "unsigned-varint 0.7.1", ] [[package]] name = "libp2p-identify" -version = "0.33.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.34.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "futures", "futures-timer", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "libp2p-swarm", "log", "lru", @@ -3142,25 +3081,25 @@ dependencies = [ [[package]] name = "libp2p-metrics" -version = "0.3.0" -source = 
"git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.4.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "libp2p-gossipsub", "libp2p-identify", "libp2p-swarm", - "open-metrics-client", + "prometheus-client", ] [[package]] name = "libp2p-mplex" -version = "0.31.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.32.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "asynchronous-codec", "bytes", "futures", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "log", "nohash-hasher", "parking_lot", @@ -3171,14 +3110,14 @@ dependencies = [ [[package]] name = "libp2p-noise" -version = "0.34.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.35.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "bytes", - "curve25519-dalek", + "curve25519-dalek 3.2.0", "futures", "lazy_static", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "log", "prost", "prost-build", @@ -3192,13 +3131,13 @@ dependencies = [ [[package]] name = "libp2p-plaintext" -version = "0.31.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.32.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "asynchronous-codec", "bytes", "futures", - "libp2p-core 
0.31.0", + "libp2p-core 0.32.0", "log", "prost", "prost-build", @@ -3208,14 +3147,14 @@ dependencies = [ [[package]] name = "libp2p-swarm" -version = "0.33.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.34.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "either", "futures", "futures-timer", "instant", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "log", "rand 0.7.3", "smallvec", @@ -3225,7 +3164,7 @@ dependencies = [ [[package]] name = "libp2p-swarm-derive" version = "0.26.1" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "quote", "syn", @@ -3233,15 +3172,15 @@ dependencies = [ [[package]] name = "libp2p-tcp" -version = "0.31.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.32.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "futures", "futures-timer", "if-addrs 0.7.0", "ipnet", "libc", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "log", "socket2 0.4.4", "tokio", @@ -3249,13 +3188,13 @@ dependencies = [ [[package]] name = "libp2p-websocket" -version = "0.33.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.34.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "either", 
"futures", "futures-rustls", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "log", "quicksink", "rw-stream-sink", @@ -3266,11 +3205,11 @@ dependencies = [ [[package]] name = "libp2p-yamux" -version = "0.35.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +version = "0.36.0" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "futures", - "libp2p-core 0.31.0", + "libp2p-core 0.32.0", "parking_lot", "thiserror", "yamux", @@ -3396,7 +3335,7 @@ dependencies = [ [[package]] name = "lighthouse" -version = "2.1.2" +version = "2.1.3" dependencies = [ "account_manager", "account_utils", @@ -3458,8 +3397,8 @@ dependencies = [ "lighthouse_metrics", "lighthouse_version", "lru", - "open-metrics-client", "parking_lot", + "prometheus-client", "rand 0.7.3", "regex", "serde", @@ -3631,7 +3570,7 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", ] [[package]] @@ -3703,7 +3642,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" dependencies = [ "adler", - "autocfg 1.0.1", + "autocfg 1.1.0", ] [[package]] @@ -3758,7 +3697,25 @@ dependencies = [ "bs58", "byteorder", "data-encoding", - "multihash", + "multihash 0.14.0", + "percent-encoding", + "serde", + "static_assertions", + "unsigned-varint 0.7.1", + "url", +] + +[[package]] +name = "multiaddr" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c580bfdd8803cce319b047d239559a22f809094aaea4ac13902a1fdcfcd4261" +dependencies = [ + "arrayref", + "bs58", + "byteorder", + "data-encoding", + "multihash 0.16.1", "percent-encoding", "serde", 
"static_assertions", @@ -3774,11 +3731,24 @@ checksum = "752a61cd890ff691b4411423d23816d5866dd5621e4d1c5687a53b94b5a979d8" dependencies = [ "digest 0.9.0", "generic-array", - "multihash-derive", + "multihash-derive 0.7.2", "sha2 0.9.9", "unsigned-varint 0.7.1", ] +[[package]] +name = "multihash" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7392bffd88bc0c4f8297e36a777ab9f80b7127409c4a1acb8fee99c9f27addcd" +dependencies = [ + "core2", + "digest 0.10.1", + "multihash-derive 0.8.0", + "sha2 0.10.1", + "unsigned-varint 0.7.1", +] + [[package]] name = "multihash-derive" version = "0.7.2" @@ -3793,6 +3763,20 @@ dependencies = [ "synstructure", ] +[[package]] +name = "multihash-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc076939022111618a5026d3be019fd8b366e76314538ff9a1b59ffbcbf98bcd" +dependencies = [ + "proc-macro-crate", + "proc-macro-error", + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "multimap" version = "0.8.3" @@ -3852,7 +3836,7 @@ dependencies = [ [[package]] name = "multistream-select" version = "0.11.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=17861d9cac121f7e448585a7f052d5eab4618826#17861d9cac121f7e448585a7f052d5eab4618826" +source = "git+https://github.com/sigp/rust-libp2p?rev=e213703e616eaba3c482d7714775e0d37c4ae8e5#e213703e616eaba3c482d7714775e0d37c4ae8e5" dependencies = [ "bytes", "futures", @@ -4000,7 +3984,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", "num-integer", "num-traits", ] @@ -4011,7 +3995,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d51546d704f52ef14b3c962b5776e53d5b862e5790e40a350d366c209bd7f7a" dependencies = [ - "autocfg 0.1.7", + "autocfg 0.1.8", 
"byteorder", "lazy_static", "libm", @@ -4030,7 +4014,7 @@ version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", "num-traits", ] @@ -4040,7 +4024,7 @@ version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2021c8337a54d21aca0d59a92577a029af9431cb59b909b03252b9c164fad59" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", "num-integer", "num-traits", ] @@ -4051,7 +4035,7 @@ version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", ] [[package]] @@ -4100,29 +4084,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" -[[package]] -name = "open-metrics-client" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e224744b2e4da5b241857d2363a13bce60425f7b6ae2a5ff88d4d5557d9cc85" -dependencies = [ - "dtoa", - "itoa 0.4.8", - "open-metrics-client-derive-text-encode", - "owning_ref", -] - -[[package]] -name = "open-metrics-client-derive-text-encode" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15c83b586f00268c619c1cb3340ec1a6f59dd9ba1d9833a273a68e6d5cd8ffc" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "openssl" version = "0.10.38" @@ -4158,7 +4119,7 @@ version = "0.9.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e46109c383602735fa0a2e48dd2b7c892b048e1bf69e5c3b1d804b7d9c203cb" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", "cc", "libc", "openssl-src", @@ -4195,18 +4156,6 @@ dependencies = [ "stable_deref_trait", ] -[[package]] 
-name = "p256" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19736d80675fbe9fe33426268150b951a3fb8f5cfca2a23a17c85ef3adb24e3b" -dependencies = [ - "ecdsa 0.13.4", - "elliptic-curve 0.11.12", - "sec1", - "sha2 0.9.9", -] - [[package]] name = "parity-scale-codec" version = "1.3.7" @@ -4389,7 +4338,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9c2f795bc591cb3384cb64082a578b89207ac92bb89c9d98c1ea2ace7cd8110" dependencies = [ - "der 0.3.5", + "der", "spki", ] @@ -4485,7 +4434,7 @@ dependencies = [ "impl-codec 0.5.1", "impl-rlp 0.3.0", "impl-serde", - "uint 0.9.2", + "uint 0.9.3", ] [[package]] @@ -4498,7 +4447,7 @@ dependencies = [ "impl-codec 0.5.1", "impl-rlp 0.3.0", "impl-serde", - "uint 0.9.2", + "uint 0.9.3", ] [[package]] @@ -4577,6 +4526,29 @@ dependencies = [ "thiserror", ] +[[package]] +name = "prometheus-client" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9a896938cc6018c64f279888b8c7559d3725210d5db9a3a1ee6bc7188d51d34" +dependencies = [ + "dtoa", + "itoa 1.0.1", + "owning_ref", + "prometheus-client-derive-text-encode", +] + +[[package]] +name = "prometheus-client-derive-text-encode" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8e12d01b9d66ad9eb4529c57666b6263fc1993cb30261d83ead658fdd932652" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "prost" version = "0.9.0" @@ -4644,9 +4616,9 @@ dependencies = [ [[package]] name = "protobuf" -version = "2.26.0" +version = "2.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d613b4fd96c0182e187734b4f8fc5cbc8c940bbf781819f7a52d42dc5922d25" +checksum = "cf7e6d18738ecd0902d30d1ad232c9125985a3422929b16c65517b38adc14f96" [[package]] name = "psutil" @@ -4860,7 +4832,7 @@ version = "1.5.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", "crossbeam-deque", "either", "rayon-core", @@ -4980,17 +4952,6 @@ dependencies = [ "quick-error", ] -[[package]] -name = "rfc6979" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96ef608575f6392792f9ecf7890c00086591d29a83910939d430753f7c050525" -dependencies = [ - "crypto-bigint", - "hmac 0.11.0", - "zeroize", -] - [[package]] name = "ring" version = "0.16.20" @@ -5107,7 +5068,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.4", + "semver 1.0.5", ] [[package]] @@ -5249,18 +5210,6 @@ dependencies = [ "untrusted", ] -[[package]] -name = "sec1" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08da66b8b0965a5555b6bd6639e68ccba85e1e2506f5fbb089e93f8a04e1a2d1" -dependencies = [ - "der 0.5.1", - "generic-array", - "subtle", - "zeroize", -] - [[package]] name = "secp256k1" version = "0.20.3" @@ -5294,9 +5243,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.6.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a57321bf8bc2362081b2599912d2961fe899c0efadf1b4b2f8d48b3e253bb96c" +checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556" dependencies = [ "core-foundation-sys", "libc", @@ -5322,9 +5271,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "568a8e6258aa33c13358f81fd834adb854c6f7c9468520910a9b1e8fac068012" +checksum = "0486718e92ec9a68fbed73bb5ef687d71103b142595b406835649bebd33f72c7" [[package]] name = "semver-parser" @@ -5671,9 
+5620,9 @@ dependencies = [ [[package]] name = "slog-term" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95c1e7e5aab61ced6006149ea772770b84a0d16ce0f7885def313e4829946d76" +checksum = "f3668dd2252f4381d64de0c79e6c8dc6bd509d1cab3535b35a3fc9bafd1241d5" dependencies = [ "atty", "chrono", @@ -5730,20 +5679,19 @@ checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451" [[package]] name = "snow" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6142f7c25e94f6fd25a32c3348ec230df9109b463f59c8c7acc4bd34936babb7" +checksum = "774d05a3edae07ce6d68ea6984f3c05e9bba8927e3dd591e3b479e5b03213d0d" dependencies = [ "aes-gcm", "blake2", "chacha20poly1305", - "rand 0.8.4", + "curve25519-dalek 4.0.0-pre.1", "rand_core 0.6.3", "ring", - "rustc_version 0.3.3", - "sha2 0.9.9", + "rustc_version 0.4.0", + "sha2 0.10.1", "subtle", - "x25519-dalek", ] [[package]] @@ -5810,7 +5758,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dae7e047abc519c96350e9484a96c6bf1492348af912fd3446dd2dc323f6268" dependencies = [ - "der 0.3.5", + "der", ] [[package]] @@ -6328,9 +6276,9 @@ checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" [[package]] name = "tracing" -version = "0.1.29" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" +checksum = "2d8d93354fe2a8e50d5953f5ae2e47a3fc2ef03292e7ea46e3cc38f549525fb9" dependencies = [ "cfg-if", "log", @@ -6341,9 +6289,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e" +checksum = 
"8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716" dependencies = [ "proc-macro2", "quote", @@ -6352,11 +6300,12 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" +checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" dependencies = [ "lazy_static", + "valuable", ] [[package]] @@ -6382,9 +6331,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5312f325fe3588e277415f5a6cca1f4ccad0f248c4cd5a4bd33032d7286abc22" +checksum = "74786ce43333fcf51efe947aed9718fbe46d5c7328ec3f1029e818083966d9aa" dependencies = [ "ansi_term", "lazy_static", @@ -6465,9 +6414,9 @@ dependencies = [ [[package]] name = "trust-dns-proto" -version = "0.20.3" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0d7f5db438199a6e2609debe3f69f808d074e0a2888ee0bccb45fe234d03f4" +checksum = "ca94d4e9feb6a181c690c4040d7a24ef34018d8313ac5044a61d21222ae24e31" dependencies = [ "async-trait", "cfg-if", @@ -6490,9 +6439,9 @@ dependencies = [ [[package]] name = "trust-dns-resolver" -version = "0.20.3" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ad17b608a64bd0735e67bde16b0636f8aa8591f831a25d18443ed00a699770" +checksum = "ecae383baad9995efaa34ce8e57d12c3f305e545887472a492b838f4b5cfb77a" dependencies = [ "cfg-if", "futures-util", @@ -6634,9 +6583,9 @@ dependencies = [ [[package]] name = "uint" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1b413ebfe8c2c74a69ff124699dd156a7fa41cb1d09ba6df94aa2f2b0a4a3a" +checksum = "12f03af7ccf01dd611cc450a0d10dbc9b745770d096473e2faf0ca6e2d66d1e0" dependencies = [ 
"arbitrary", "byteorder", @@ -6677,9 +6626,9 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" [[package]] name = "unicode-width" @@ -6826,6 +6775,12 @@ dependencies = [ "types", ] +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + [[package]] name = "vcpkg" version = "0.2.15" @@ -7250,7 +7205,7 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a0c105152107e3b96f6a00a65e86ce82d9b125230e1c4302940eca58ff71f4f" dependencies = [ - "curve25519-dalek", + "curve25519-dalek 3.2.0", "rand_core 0.5.1", "zeroize", ] @@ -7281,9 +7236,9 @@ dependencies = [ [[package]] name = "yamux" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7d9028f208dd5e63c614be69f115c1b53cacc1111437d4c765185856666c107" +checksum = "29d4c1dd079043fe673e79fe3c3a260ae2d2fb413f1062cae9e062748df0df03" dependencies = [ "futures", "log", @@ -7295,9 +7250,9 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.5.2" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c88870063c39ee00ec285a2f8d6a966e5b6fb2becc4e8dac77ed0d370ed6006" +checksum = "d68d9dcec5f9b43a30d38c49f91dfedfaac384cb8f085faca366c26207dd1619" dependencies = [ "zeroize_derive", ] diff --git a/Makefile b/Makefile index f363854c32..a92da9bcc8 100644 --- a/Makefile +++ b/Makefile @@ -156,7 +156,7 @@ arbitrary-fuzz: # Runs cargo audit (Audit Cargo.lock files for crates with security vulnerabilities reported to the RustSec Advisory Database) audit: cargo install --force 
cargo-audit - cargo audit --ignore RUSTSEC-2020-0071 --ignore RUSTSEC-2020-0159 + cargo audit --ignore RUSTSEC-2020-0071 --ignore RUSTSEC-2020-0159 --ignore RUSTSEC-2022-0009 # Runs `cargo vendor` to make sure dependencies can be vendored for packaging, reproducibility and archival purpose. vendor: diff --git a/account_manager/src/validator/import.rs b/account_manager/src/validator/import.rs index f43dfcdb8f..4c7140df39 100644 --- a/account_manager/src/validator/import.rs +++ b/account_manager/src/validator/import.rs @@ -273,9 +273,15 @@ pub fn cli_run(matches: &ArgMatches, validator_dir: PathBuf) -> Result<(), Strin eprintln!("Successfully imported keystore."); num_imported_keystores += 1; - let validator_def = - ValidatorDefinition::new_keystore_with_password(&dest_keystore, password_opt, None) - .map_err(|e| format!("Unable to create new validator definition: {:?}", e))?; + let graffiti = None; + let suggested_fee_recipient = None; + let validator_def = ValidatorDefinition::new_keystore_with_password( + &dest_keystore, + password_opt, + graffiti, + suggested_fee_recipient, + ) + .map_err(|e| format!("Unable to create new validator definition: {:?}", e))?; defs.push(validator_def); diff --git a/beacon_node/Cargo.toml b/beacon_node/Cargo.toml index edcc26af7a..9d55003f52 100644 --- a/beacon_node/Cargo.toml +++ b/beacon_node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "beacon_node" -version = "2.1.2" +version = "2.1.3" authors = ["Paul Hauner ", "Age Manning BeaconChain { } BeaconState::Merge(_) => { let sync_aggregate = get_sync_aggregate()?; - let execution_payload = get_execution_payload(self, &state)?; + let execution_payload = get_execution_payload(self, &state, proposer_index)?; BeaconBlock::Merge(BeaconBlockMerge { slot, proposer_index, diff --git a/beacon_node/beacon_chain/src/execution_payload.rs b/beacon_node/beacon_chain/src/execution_payload.rs index c19bba6126..21d51be99d 100644 --- a/beacon_node/beacon_chain/src/execution_payload.rs +++ 
b/beacon_node/beacon_chain/src/execution_payload.rs @@ -204,14 +204,16 @@ pub fn validate_execution_payload_for_gossip( pub fn get_execution_payload( chain: &BeaconChain, state: &BeaconState, + proposer_index: u64, ) -> Result, BlockProductionError> { - Ok(prepare_execution_payload_blocking(chain, state)?.unwrap_or_default()) + Ok(prepare_execution_payload_blocking(chain, state, proposer_index)?.unwrap_or_default()) } /// Wraps the async `prepare_execution_payload` function as a blocking task. pub fn prepare_execution_payload_blocking( chain: &BeaconChain, state: &BeaconState, + proposer_index: u64, ) -> Result>, BlockProductionError> { let execution_layer = chain .execution_layer @@ -219,7 +221,9 @@ pub fn prepare_execution_payload_blocking( .ok_or(BlockProductionError::ExecutionLayerMissing)?; execution_layer - .block_on_generic(|_| async { prepare_execution_payload(chain, state).await }) + .block_on_generic(|_| async { + prepare_execution_payload(chain, state, proposer_index).await + }) .map_err(BlockProductionError::BlockingFailed)? 
} @@ -240,6 +244,7 @@ pub fn prepare_execution_payload_blocking( pub async fn prepare_execution_payload( chain: &BeaconChain, state: &BeaconState, + proposer_index: u64, ) -> Result>, BlockProductionError> { let spec = &chain.spec; let execution_layer = chain @@ -300,6 +305,7 @@ pub async fn prepare_execution_payload( timestamp, random, finalized_block_hash.unwrap_or_else(Hash256::zero), + proposer_index, ) .await .map_err(BlockProductionError::GetPayloadFailed)?; diff --git a/beacon_node/client/src/builder.rs b/beacon_node/client/src/builder.rs index 550d89125e..f5045418ab 100644 --- a/beacon_node/client/src/builder.rs +++ b/beacon_node/client/src/builder.rs @@ -18,7 +18,7 @@ use eth2::{ }; use execution_layer::ExecutionLayer; use genesis::{interop_genesis_state, Eth1GenesisService, DEFAULT_ETH1_BLOCK_HASH}; -use lighthouse_network::{open_metrics_client::registry::Registry, NetworkGlobals}; +use lighthouse_network::{prometheus_client::registry::Registry, NetworkGlobals}; use monitoring_api::{MonitoringHttpClient, ProcessType}; use network::{NetworkConfig, NetworkMessage, NetworkService}; use slasher::Slasher; @@ -700,6 +700,11 @@ where // Spawn a routine that tracks the status of the execution engines. execution_layer.spawn_watchdog_routine(beacon_chain.slot_clock.clone()); + + // Spawn a routine that removes expired proposer preparations. 
+ execution_layer.spawn_clean_proposer_preparation_routine::( + beacon_chain.slot_clock.clone(), + ); } } diff --git a/beacon_node/execution_layer/src/lib.rs b/beacon_node/execution_layer/src/lib.rs index 5c069f0b0b..cb267e5f0a 100644 --- a/beacon_node/execution_layer/src/lib.rs +++ b/beacon_node/execution_layer/src/lib.rs @@ -10,6 +10,7 @@ use lru::LruCache; use sensitive_url::SensitiveUrl; use slog::{crit, debug, error, info, Logger}; use slot_clock::SlotClock; +use std::collections::HashMap; use std::future::Future; use std::sync::Arc; use std::time::Duration; @@ -18,7 +19,7 @@ use tokio::{ sync::{Mutex, MutexGuard}, time::{sleep, sleep_until, Instant}, }; -use types::ChainSpec; +use types::{ChainSpec, Epoch, ProposerPreparationData}; pub use engine_api::{http::HttpJsonRpc, ExecutePayloadResponseStatus}; @@ -30,6 +31,16 @@ pub mod test_utils; /// in an LRU cache to avoid redundant lookups. This is the size of that cache. const EXECUTION_BLOCKS_LRU_CACHE_SIZE: usize = 128; +/// A fee recipient address for use during block production. Only used as a very last resort if +/// there is no address provided by the user. +/// +/// ## Note +/// +/// This is *not* the zero-address, since Geth has been known to return errors for a coinbase of +/// 0x00..00. +const DEFAULT_SUGGESTED_FEE_RECIPIENT: [u8; 20] = + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]; + #[derive(Debug)] pub enum Error { NoEngines, @@ -46,9 +57,16 @@ impl From for Error { } } +#[derive(Clone)] +pub struct ProposerPreparationDataEntry { + update_epoch: Epoch, + preparation_data: ProposerPreparationData, +} + struct Inner { engines: Engines, suggested_fee_recipient: Option
, + proposer_preparation_data: Mutex>, execution_blocks: Mutex>, executor: TaskExecutor, log: Logger, @@ -96,6 +114,7 @@ impl ExecutionLayer { log: log.clone(), }, suggested_fee_recipient, + proposer_preparation_data: Mutex::new(HashMap::new()), execution_blocks: Mutex::new(LruCache::new(EXECUTION_BLOCKS_LRU_CACHE_SIZE)), executor, log, @@ -116,17 +135,18 @@ impl ExecutionLayer { &self.inner.executor } - fn suggested_fee_recipient(&self) -> Result { - self.inner - .suggested_fee_recipient - .ok_or(Error::FeeRecipientUnspecified) - } - /// Note: this function returns a mutex guard, be careful to avoid deadlocks. async fn execution_blocks(&self) -> MutexGuard<'_, LruCache> { self.inner.execution_blocks.lock().await } + /// Note: this function returns a mutex guard, be careful to avoid deadlocks. + async fn proposer_preparation_data( + &self, + ) -> MutexGuard<'_, HashMap> { + self.inner.proposer_preparation_data.lock().await + } + fn log(&self) -> &Logger { &self.inner.log } @@ -234,11 +254,124 @@ impl ExecutionLayer { self.engines().upcheck_not_synced(Logging::Disabled).await; } + /// Spawns a routine which cleans the cached proposer preparations periodically. + pub fn spawn_clean_proposer_preparation_routine( + &self, + slot_clock: S, + ) { + let preparation_cleaner = |el: ExecutionLayer| async move { + // Start the loop to periodically clean proposer preparation cache. 
+ loop { + if let Some(duration_to_next_epoch) = + slot_clock.duration_to_next_epoch(T::slots_per_epoch()) + { + // Wait for next epoch + sleep(duration_to_next_epoch).await; + + match slot_clock + .now() + .map(|slot| slot.epoch(T::slots_per_epoch())) + { + Some(current_epoch) => el + .clean_proposer_preparation(current_epoch) + .await + .map_err(|e| { + error!( + el.log(), + "Failed to clean proposer preparation cache"; + "error" => format!("{:?}", e) + ) + }) + .unwrap_or(()), + None => error!(el.log(), "Failed to get current epoch from slot clock"), + } + } else { + error!(el.log(), "Failed to read slot clock"); + // If we can't read the slot clock, just wait another slot and retry. + sleep(slot_clock.slot_duration()).await; + } + } + }; + + self.spawn(preparation_cleaner, "exec_preparation_cleanup"); + } + /// Returns `true` if there is at least one synced and reachable engine. pub async fn is_synced(&self) -> bool { self.engines().any_synced().await } + /// Updates the proposer preparation data provided by validators + pub fn update_proposer_preparation_blocking( + &self, + update_epoch: Epoch, + preparation_data: &[ProposerPreparationData], + ) -> Result<(), Error> { + self.block_on_generic(|_| async move { + self.update_proposer_preparation(update_epoch, preparation_data) + .await + })? 
+ } + + /// Updates the proposer preparation data provided by validators + async fn update_proposer_preparation( + &self, + update_epoch: Epoch, + preparation_data: &[ProposerPreparationData], + ) -> Result<(), Error> { + let mut proposer_preparation_data = self.proposer_preparation_data().await; + for preparation_entry in preparation_data { + proposer_preparation_data.insert( + preparation_entry.validator_index, + ProposerPreparationDataEntry { + update_epoch, + preparation_data: preparation_entry.clone(), + }, + ); + } + + Ok(()) + } + + /// Removes expired entries from cached proposer preparations + async fn clean_proposer_preparation(&self, current_epoch: Epoch) -> Result<(), Error> { + let mut proposer_preparation_data = self.proposer_preparation_data().await; + + // Keep all entries that have been updated in the last 2 epochs + let retain_epoch = current_epoch.saturating_sub(Epoch::new(2)); + proposer_preparation_data.retain(|_validator_index, preparation_entry| { + preparation_entry.update_epoch >= retain_epoch + }); + + Ok(()) + } + + /// Returns the fee-recipient address that should be used to build a block + async fn get_suggested_fee_recipient(&self, proposer_index: u64) -> Address { + if let Some(preparation_data_entry) = + self.proposer_preparation_data().await.get(&proposer_index) + { + // The values provided via the API have first priority. + preparation_data_entry.preparation_data.fee_recipient + } else if let Some(address) = self.inner.suggested_fee_recipient { + // If there has been no fee recipient provided via the API, but the BN has been provided + // with a global default address, use that. + address + } else { + // If there is no user-provided fee recipient, use a junk value and complain loudly. + crit!( + self.log(), + "Fee recipient unknown"; + "msg" => "the suggested_fee_recipient was unknown during block production. \ + a junk address was used, rewards were lost! 
\ + check the --suggested-fee-recipient flag and VC configuration.", + "proposer_index" => ?proposer_index + ); + + Address::from_slice(&DEFAULT_SUGGESTED_FEE_RECIPIENT) + } + } + /// Maps to the `engine_getPayload` JSON-RPC call. /// /// However, it will attempt to call `self.prepare_payload` if it cannot find an existing @@ -254,8 +387,10 @@ impl ExecutionLayer { timestamp: u64, random: Hash256, finalized_block_hash: Hash256, + proposer_index: u64, ) -> Result, Error> { - let suggested_fee_recipient = self.suggested_fee_recipient()?; + let suggested_fee_recipient = self.get_suggested_fee_recipient(proposer_index).await; + debug!( self.log(), "Issuing engine_getPayload"; diff --git a/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs b/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs index 59345bc01f..295e82914b 100644 --- a/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs +++ b/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs @@ -127,9 +127,16 @@ impl MockExecutionLayer { .await .unwrap(); + let validator_index = 0; let payload = self .el - .get_payload::(parent_hash, timestamp, random, finalized_block_hash) + .get_payload::( + parent_hash, + timestamp, + random, + finalized_block_hash, + validator_index, + ) .await .unwrap(); let block_hash = payload.block_hash; diff --git a/beacon_node/http_api/src/lib.rs b/beacon_node/http_api/src/lib.rs index b30af858f7..5ef845858a 100644 --- a/beacon_node/http_api/src/lib.rs +++ b/beacon_node/http_api/src/lib.rs @@ -45,9 +45,9 @@ use tokio::sync::mpsc::UnboundedSender; use tokio_stream::{wrappers::BroadcastStream, StreamExt}; use types::{ Attestation, AttesterSlashing, BeaconStateError, CommitteeCache, ConfigAndPreset, Epoch, - EthSpec, ForkName, ProposerSlashing, RelativeEpoch, SignedAggregateAndProof, SignedBeaconBlock, - SignedContributionAndProof, SignedVoluntaryExit, Slot, SyncCommitteeMessage, - SyncContributionData, + EthSpec, ForkName, 
ProposerPreparationData, ProposerSlashing, RelativeEpoch, + SignedAggregateAndProof, SignedBeaconBlock, SignedContributionAndProof, SignedVoluntaryExit, + Slot, SyncCommitteeMessage, SyncContributionData, }; use version::{ add_consensus_version_header, fork_versioned_response, inconsistent_fork_rejection, @@ -2186,6 +2186,53 @@ pub fn serve( }, ); + // POST validator/prepare_beacon_proposer + let post_validator_prepare_beacon_proposer = eth1_v1 + .and(warp::path("validator")) + .and(warp::path("prepare_beacon_proposer")) + .and(warp::path::end()) + .and(not_while_syncing_filter.clone()) + .and(chain_filter.clone()) + .and(warp::addr::remote()) + .and(log_filter.clone()) + .and(warp::body::json()) + .and_then( + |chain: Arc>, + client_addr: Option, + log: Logger, + preparation_data: Vec| { + blocking_json_task(move || { + let execution_layer = chain + .execution_layer + .as_ref() + .ok_or(BeaconChainError::ExecutionLayerMissing) + .map_err(warp_utils::reject::beacon_chain_error)?; + let current_epoch = chain + .epoch() + .map_err(warp_utils::reject::beacon_chain_error)?; + + debug!( + log, + "Received proposer preparation data"; + "count" => preparation_data.len(), + "client" => client_addr + .map(|a| a.to_string()) + .unwrap_or_else(|| "unknown".to_string()), + ); + + execution_layer + .update_proposer_preparation_blocking(current_epoch, &preparation_data) + .map_err(|_e| { + warp_utils::reject::custom_bad_request( + "error processing proposer preparations".to_string(), + ) + })?; + + Ok(()) + }) + }, + ); + // POST validator/sync_committee_subscriptions let post_validator_sync_committee_subscriptions = eth1_v1 .and(warp::path("validator")) @@ -2710,6 +2757,7 @@ pub fn serve( .or(post_validator_contribution_and_proofs.boxed()) .or(post_validator_beacon_committee_subscriptions.boxed()) .or(post_validator_sync_committee_subscriptions.boxed()) + .or(post_validator_prepare_beacon_proposer.boxed()) .or(post_lighthouse_liveness.boxed()) 
.or(post_lighthouse_database_reconstruct.boxed()) .or(post_lighthouse_database_historical_blocks.boxed()), diff --git a/beacon_node/http_api/tests/common.rs b/beacon_node/http_api/tests/common.rs index 758c29a60f..cf20e2f686 100644 --- a/beacon_node/http_api/tests/common.rs +++ b/beacon_node/http_api/tests/common.rs @@ -120,7 +120,7 @@ pub async fn create_api_server( send_back_addr: EXTERNAL_ADDR.parse().unwrap(), }; let con_id = ConnectionId::new(1); - pm.inject_connection_established(&peer_id, &con_id, &connected_point, None); + pm.inject_connection_established(&peer_id, &con_id, &connected_point, None, 0); *network_globals.sync_state.write() = SyncState::Synced; let eth1_service = eth1::Service::new(eth1::Config::default(), log.clone(), chain.spec.clone()); diff --git a/beacon_node/http_metrics/src/lib.rs b/beacon_node/http_metrics/src/lib.rs index 89e6a8e2d1..729d92ee9e 100644 --- a/beacon_node/http_metrics/src/lib.rs +++ b/beacon_node/http_metrics/src/lib.rs @@ -4,7 +4,7 @@ mod metrics; use beacon_chain::{BeaconChain, BeaconChainTypes}; -use lighthouse_network::open_metrics_client::registry::Registry; +use lighthouse_network::prometheus_client::registry::Registry; use lighthouse_version::version_with_platform; use serde::{Deserialize, Serialize}; use slog::{crit, info, Logger}; diff --git a/beacon_node/http_metrics/src/metrics.rs b/beacon_node/http_metrics/src/metrics.rs index 66c961956c..9b15694211 100644 --- a/beacon_node/http_metrics/src/metrics.rs +++ b/beacon_node/http_metrics/src/metrics.rs @@ -1,7 +1,7 @@ use crate::Context; use beacon_chain::BeaconChainTypes; use lighthouse_metrics::{Encoder, TextEncoder}; -use lighthouse_network::open_metrics_client::encoding::text::encode; +use lighthouse_network::prometheus_client::encoding::text::encode; use malloc_utils::scrape_allocator_metrics; pub use lighthouse_metrics::*; diff --git a/beacon_node/lighthouse_network/Cargo.toml b/beacon_node/lighthouse_network/Cargo.toml index 31dfab271e..bb30aac55a 100644 --- 
a/beacon_node/lighthouse_network/Cargo.toml +++ b/beacon_node/lighthouse_network/Cargo.toml @@ -38,15 +38,14 @@ directory = { path = "../../common/directory" } regex = "1.3.9" strum = { version = "0.21.0", features = ["derive"] } superstruct = "0.4.0" -open-metrics-client = "0.13.0" +prometheus-client = "0.15.0" [dependencies.libp2p] -# version = "0.41.0" +git = "https://github.com/sigp/rust-libp2p" +# branch libp2p-gossipsub-interval-hotfix +rev = "e213703e616eaba3c482d7714775e0d37c4ae8e5" default-features = false -git = "https://github.com/libp2p/rust-libp2p" -# Latest libp2p master -rev = "17861d9cac121f7e448585a7f052d5eab4618826" -features = ["websocket", "identify", "mplex", "yamux", "noise", "gossipsub", "dns-tokio", "tcp-tokio", "plaintext"] +features = ["websocket", "identify", "mplex", "yamux", "noise", "gossipsub", "dns-tokio", "tcp-tokio", "plaintext", "secp256k1"] [dev-dependencies] slog-term = "2.6.0" diff --git a/beacon_node/lighthouse_network/src/behaviour/gossip_cache.rs b/beacon_node/lighthouse_network/src/behaviour/gossip_cache.rs new file mode 100644 index 0000000000..93687e555b --- /dev/null +++ b/beacon_node/lighthouse_network/src/behaviour/gossip_cache.rs @@ -0,0 +1,247 @@ +use std::collections::hash_map::Entry; +use std::collections::HashMap; +use std::pin::Pin; +use std::task::{Context, Poll}; +use std::time::Duration; + +use crate::types::GossipKind; +use crate::GossipTopic; + +use tokio_util::time::delay_queue::{DelayQueue, Key}; + +/// Store of gossip messages that we failed to publish and will try again later. By default, all +/// messages are ignored. This behaviour can be changed using `GossipCacheBuilder::default_timeout` +/// to apply the same delay to every kind. Individual timeouts for specific kinds can be set and +/// will overwrite the default_timeout if present. +pub struct GossipCache { + /// Expire timeouts for each topic-msg pair. + expirations: DelayQueue<(GossipTopic, Vec)>, + /// Messages cached for each topic. 
+ topic_msgs: HashMap, Key>>, + /// Timeout for blocks. + beacon_block: Option, + /// Timeout for aggregate attestations. + aggregates: Option, + /// Timeout for attestations. + attestation: Option, + /// Timeout for voluntary exits. + voluntary_exit: Option, + /// Timeout for proposer slashings. + proposer_slashing: Option, + /// Timeout for attester slashings. + attester_slashing: Option, + /// Timeout for aggregated sync commitee signatures. + signed_contribution_and_proof: Option, + /// Timeout for sync commitee messages. + sync_committee_message: Option, +} + +#[derive(Default)] +pub struct GossipCacheBuilder { + default_timeout: Option, + /// Timeout for blocks. + beacon_block: Option, + /// Timeout for aggregate attestations. + aggregates: Option, + /// Timeout for attestations. + attestation: Option, + /// Timeout for voluntary exits. + voluntary_exit: Option, + /// Timeout for proposer slashings. + proposer_slashing: Option, + /// Timeout for attester slashings. + attester_slashing: Option, + /// Timeout for aggregated sync commitee signatures. + signed_contribution_and_proof: Option, + /// Timeout for sync commitee messages. + sync_committee_message: Option, +} + +#[allow(dead_code)] +impl GossipCacheBuilder { + /// By default, all timeouts all disabled. Setting a default timeout will enable all timeout + /// that are not already set. + pub fn default_timeout(mut self, timeout: Duration) -> Self { + self.default_timeout = Some(timeout); + self + } + /// Timeout for blocks. + pub fn beacon_block_timeout(mut self, timeout: Duration) -> Self { + self.beacon_block = Some(timeout); + self + } + + /// Timeout for aggregate attestations. + pub fn aggregates_timeout(mut self, timeout: Duration) -> Self { + self.aggregates = Some(timeout); + self + } + + /// Timeout for attestations. + pub fn attestation_timeout(mut self, timeout: Duration) -> Self { + self.attestation = Some(timeout); + self + } + + /// Timeout for voluntary exits. 
+ pub fn voluntary_exit_timeout(mut self, timeout: Duration) -> Self { + self.voluntary_exit = Some(timeout); + self + } + + /// Timeout for proposer slashings. + pub fn proposer_slashing_timeout(mut self, timeout: Duration) -> Self { + self.proposer_slashing = Some(timeout); + self + } + + /// Timeout for attester slashings. + pub fn attester_slashing_timeout(mut self, timeout: Duration) -> Self { + self.attester_slashing = Some(timeout); + self + } + + /// Timeout for aggregated sync commitee signatures. + pub fn signed_contribution_and_proof_timeout(mut self, timeout: Duration) -> Self { + self.signed_contribution_and_proof = Some(timeout); + self + } + + /// Timeout for sync commitee messages. + pub fn sync_committee_message_timeout(mut self, timeout: Duration) -> Self { + self.sync_committee_message = Some(timeout); + self + } + + pub fn build(self) -> GossipCache { + let GossipCacheBuilder { + default_timeout, + beacon_block, + aggregates, + attestation, + voluntary_exit, + proposer_slashing, + attester_slashing, + signed_contribution_and_proof, + sync_committee_message, + } = self; + GossipCache { + expirations: DelayQueue::default(), + topic_msgs: HashMap::default(), + beacon_block: beacon_block.or(default_timeout), + aggregates: aggregates.or(default_timeout), + attestation: attestation.or(default_timeout), + voluntary_exit: voluntary_exit.or(default_timeout), + proposer_slashing: proposer_slashing.or(default_timeout), + attester_slashing: attester_slashing.or(default_timeout), + signed_contribution_and_proof: signed_contribution_and_proof.or(default_timeout), + sync_committee_message: sync_committee_message.or(default_timeout), + } + } +} + +impl GossipCache { + /// Get a builder of a `GossipCache`. Topic kinds for which no timeout is defined will be + /// ignored if added in `insert`. + pub fn builder() -> GossipCacheBuilder { + GossipCacheBuilder::default() + } + + // Insert a message to be sent later. 
+ pub fn insert(&mut self, topic: GossipTopic, data: Vec) { + let expire_timeout = match topic.kind() { + GossipKind::BeaconBlock => self.beacon_block, + GossipKind::BeaconAggregateAndProof => self.aggregates, + GossipKind::Attestation(_) => self.attestation, + GossipKind::VoluntaryExit => self.voluntary_exit, + GossipKind::ProposerSlashing => self.proposer_slashing, + GossipKind::AttesterSlashing => self.attester_slashing, + GossipKind::SignedContributionAndProof => self.signed_contribution_and_proof, + GossipKind::SyncCommitteeMessage(_) => self.sync_committee_message, + }; + let expire_timeout = match expire_timeout { + Some(expire_timeout) => expire_timeout, + None => return, + }; + match self + .topic_msgs + .entry(topic.clone()) + .or_default() + .entry(data.clone()) + { + Entry::Occupied(key) => self.expirations.reset(key.get(), expire_timeout), + Entry::Vacant(entry) => { + let key = self.expirations.insert((topic, data), expire_timeout); + entry.insert(key); + } + } + } + + // Get the registered messages for this topic. + pub fn retrieve(&mut self, topic: &GossipTopic) -> Option> + '_> { + if let Some(msgs) = self.topic_msgs.remove(topic) { + for (_, key) in msgs.iter() { + self.expirations.remove(key); + } + Some(msgs.into_keys()) + } else { + None + } + } +} + +impl futures::stream::Stream for GossipCache { + type Item = Result; // We don't care to retrieve the expired data. + + fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + match self.expirations.poll_expired(cx) { + Poll::Ready(Some(Ok(expired))) => { + let expected_key = expired.key(); + let (topic, data) = expired.into_inner(); + match self.topic_msgs.get_mut(&topic) { + Some(msgs) => { + let key = msgs.remove(&data); + debug_assert_eq!(key, Some(expected_key)); + if msgs.is_empty() { + // no more messages for this topic. 
+ self.topic_msgs.remove(&topic); + } + } + None => { + #[cfg(debug_assertions)] + panic!("Topic for registered message is not present.") + } + } + Poll::Ready(Some(Ok(topic))) + } + Poll::Ready(Some(Err(x))) => Poll::Ready(Some(Err(x.to_string()))), + Poll::Ready(None) => Poll::Ready(None), + Poll::Pending => Poll::Pending, + } + } +} + +#[cfg(test)] +mod tests { + use crate::types::GossipKind; + + use super::*; + use futures::stream::StreamExt; + + #[tokio::test] + async fn test_stream() { + let mut cache = GossipCache::builder() + .default_timeout(Duration::from_millis(300)) + .build(); + let test_topic = GossipTopic::new( + GossipKind::Attestation(1u64.into()), + crate::types::GossipEncoding::SSZSnappy, + [0u8; 4], + ); + cache.insert(test_topic, vec![]); + tokio::time::sleep(Duration::from_millis(300)).await; + while cache.next().await.is_some() {} + assert!(cache.expirations.is_empty()); + assert!(cache.topic_msgs.is_empty()); + } +} diff --git a/beacon_node/lighthouse_network/src/behaviour/mod.rs b/beacon_node/lighthouse_network/src/behaviour/mod.rs index 2a79961094..d3f9b40c42 100644 --- a/beacon_node/lighthouse_network/src/behaviour/mod.rs +++ b/beacon_node/lighthouse_network/src/behaviour/mod.rs @@ -15,6 +15,8 @@ use crate::types::{ }; use crate::Eth2Enr; use crate::{error, metrics, Enr, NetworkGlobals, PubsubMessage, TopicHash}; +use futures::stream::StreamExt; +use libp2p::gossipsub::error::PublishError; use libp2p::{ core::{ connection::ConnectionId, identity::Keypair, multiaddr::Protocol as MProtocol, Multiaddr, @@ -50,6 +52,9 @@ use types::{ SignedBeaconBlock, Slot, SubnetId, SyncSubnetId, }; +use self::gossip_cache::GossipCache; + +mod gossip_cache; pub mod gossipsub_scoring_parameters; /// The number of peers we target per subnet for discovery queries. 
@@ -177,6 +182,8 @@ pub struct Behaviour { /// The interval for updating gossipsub scores #[behaviour(ignore)] update_gossipsub_scores: tokio::time::Interval, + #[behaviour(ignore)] + gossip_cache: GossipCache, /// Logger for behaviour actions. #[behaviour(ignore)] log: slog::Logger, @@ -280,6 +287,21 @@ impl Behaviour { ..Default::default() }; + let slot_duration = std::time::Duration::from_secs(ctx.chain_spec.seconds_per_slot); + let half_epoch = std::time::Duration::from_secs( + ctx.chain_spec.seconds_per_slot * TSpec::slots_per_epoch() / 2, + ); + let gossip_cache = GossipCache::builder() + .beacon_block_timeout(slot_duration) + .aggregates_timeout(half_epoch) + .attestation_timeout(half_epoch) + .voluntary_exit_timeout(half_epoch * 2) + .proposer_slashing_timeout(half_epoch * 2) + .attester_slashing_timeout(half_epoch * 2) + // .signed_contribution_and_proof_timeout(timeout) // Do not retry + // .sync_committee_message_timeout(timeout) // Do not retry + .build(); + Ok(Behaviour { // Sub-behaviours gossipsub, @@ -297,6 +319,7 @@ impl Behaviour { log: behaviour_log, score_settings, fork_context: ctx.fork_context, + gossip_cache, update_gossipsub_scores, }) } @@ -422,9 +445,11 @@ impl Behaviour { for message in messages { for topic in message.topics(GossipEncoding::default(), self.enr_fork_id.fork_digest) { let message_data = message.encode(GossipEncoding::default()); - if let Err(e) = self.gossipsub.publish(topic.clone().into(), message_data) { - slog::warn!(self.log, "Could not publish message"; - "error" => ?e); + if let Err(e) = self + .gossipsub + .publish(topic.clone().into(), message_data.clone()) + { + slog::warn!(self.log, "Could not publish message"; "error" => ?e); // add to metrics match topic.kind() { @@ -445,6 +470,10 @@ impl Behaviour { }; } } + + if let PublishError::InsufficientPeers = e { + self.gossip_cache.insert(topic, message_data); + } } } } @@ -788,7 +817,11 @@ impl Behaviour { for peer_id in peers_to_dial { debug!(self.log, "Dialing 
cached ENR peer"; "peer_id" => %peer_id); // Remove the ENR from the cache to prevent continual re-dialing on disconnects + self.discovery.remove_cached_enr(&peer_id); + // For any dial event, inform the peer manager + let enr = self.discovery_mut().enr_of_peer(&peer_id); + self.peer_manager.inject_dialing(&peer_id, enr); self.internal_events .push_back(InternalBehaviourMessage::DialPeer(peer_id)); } @@ -868,11 +901,39 @@ impl NetworkBehaviourEventProcess for Behaviour< } } GossipsubEvent::Subscribed { peer_id, topic } => { - if let Some(subnet_id) = subnet_from_topic_hash(&topic) { - self.network_globals - .peers - .write() - .add_subscription(&peer_id, subnet_id); + if let Ok(topic) = GossipTopic::decode(topic.as_str()) { + if let Some(subnet_id) = topic.subnet_id() { + self.network_globals + .peers + .write() + .add_subscription(&peer_id, subnet_id); + } + // Try to send the cached messages for this topic + if let Some(msgs) = self.gossip_cache.retrieve(&topic) { + for data in msgs { + let topic_str: &str = topic.kind().as_ref(); + match self.gossipsub.publish(topic.clone().into(), data) { + Ok(_) => { + warn!(self.log, "Gossip message published on retry"; "topic" => topic_str); + if let Some(v) = metrics::get_int_counter( + &metrics::GOSSIP_LATE_PUBLISH_PER_TOPIC_KIND, + &[topic_str], + ) { + v.inc() + }; + } + Err(e) => { + warn!(self.log, "Gossip message publish failed on retry"; "topic" => topic_str, "error" => %e); + if let Some(v) = metrics::get_int_counter( + &metrics::GOSSIP_FAILED_LATE_PUBLISH_PER_TOPIC_KIND, + &[topic_str], + ) { + v.inc() + }; + } + } + } + } } } GossipsubEvent::Unsubscribed { peer_id, topic } => { @@ -1044,6 +1105,9 @@ impl NetworkBehaviourEventProcess for Behaviour< let to_dial_peers = self.peer_manager.peers_discovered(results); for peer_id in to_dial_peers { debug!(self.log, "Dialing discovered peer"; "peer_id" => %peer_id); + // For any dial event, inform the peer manager + let enr = self.discovery_mut().enr_of_peer(&peer_id); + 
self.peer_manager.inject_dialing(&peer_id, enr); self.internal_events .push_back(InternalBehaviourMessage::DialPeer(peer_id)); } @@ -1095,9 +1159,6 @@ impl Behaviour { if let Some(event) = self.internal_events.pop_front() { match event { InternalBehaviourMessage::DialPeer(peer_id) => { - // For any dial event, inform the peer manager - let enr = self.discovery_mut().enr_of_peer(&peer_id); - self.peer_manager.inject_dialing(&peer_id, enr); // Submit the event let handler = self.new_handler(); return Poll::Ready(NBAction::Dial { @@ -1125,6 +1186,21 @@ impl Behaviour { self.peer_manager.update_gossipsub_scores(&self.gossipsub); } + // poll the gossipsub cache to clear expired messages + while let Poll::Ready(Some(result)) = self.gossip_cache.poll_next_unpin(cx) { + match result { + Err(e) => warn!(self.log, "Gossip cache error"; "error" => e), + Ok(expired_topic) => { + if let Some(v) = metrics::get_int_counter( + &metrics::GOSSIP_EXPIRED_LATE_PUBLISH_PER_TOPIC_KIND, + &[expired_topic.kind().as_ref()], + ) { + v.inc() + }; + } + } + } + Poll::Pending } } diff --git a/beacon_node/lighthouse_network/src/discovery/mod.rs b/beacon_node/lighthouse_network/src/discovery/mod.rs index 34c29a44d1..5cc059c2a8 100644 --- a/beacon_node/lighthouse_network/src/discovery/mod.rs +++ b/beacon_node/lighthouse_network/src/discovery/mod.rs @@ -927,24 +927,6 @@ impl NetworkBehaviour for Discovery { } } - fn inject_connected(&mut self, _peer_id: &PeerId) {} - fn inject_disconnected(&mut self, _peer_id: &PeerId) {} - fn inject_connection_established( - &mut self, - _peer_id: &PeerId, - _connection_id: &ConnectionId, - _endpoint: &ConnectedPoint, - _failed_addresses: Option<&Vec>, - ) { - } - fn inject_connection_closed( - &mut self, - _: &PeerId, - _: &ConnectionId, - _connected_point: &ConnectedPoint, - _handler: Self::ProtocolsHandler, - ) { - } fn inject_event( &mut self, _: PeerId, @@ -963,10 +945,11 @@ impl NetworkBehaviour for Discovery { match error { DialError::Banned | 
DialError::LocalPeerId - | DialError::InvalidPeerId + | DialError::InvalidPeerId(_) | DialError::ConnectionIo(_) | DialError::NoAddresses - | DialError::Transport(_) => { + | DialError::Transport(_) + | DialError::WrongPeerId { .. } => { // set peer as disconnected in discovery DHT debug!(self.log, "Marking peer disconnected in DHT"; "peer_id" => %peer_id); self.disconnect_peer(&peer_id); diff --git a/beacon_node/lighthouse_network/src/lib.rs b/beacon_node/lighthouse_network/src/lib.rs index 0460a42c8a..f679b7e657 100644 --- a/beacon_node/lighthouse_network/src/lib.rs +++ b/beacon_node/lighthouse_network/src/lib.rs @@ -67,7 +67,7 @@ pub use crate::types::{ SubnetDiscovery, }; -pub use open_metrics_client; +pub use prometheus_client; pub use behaviour::{BehaviourEvent, Gossipsub, PeerRequestId, Request, Response}; pub use config::Config as NetworkConfig; diff --git a/beacon_node/lighthouse_network/src/metrics.rs b/beacon_node/lighthouse_network/src/metrics.rs index 1dfe0448b7..66d7a1f74a 100644 --- a/beacon_node/lighthouse_network/src/metrics.rs +++ b/beacon_node/lighthouse_network/src/metrics.rs @@ -81,14 +81,30 @@ lazy_static! 
{ "Gossipsub messages that we did not accept, per client", &["client", "validation_result"] ); - + pub static ref GOSSIP_LATE_PUBLISH_PER_TOPIC_KIND: Result = + try_create_int_counter_vec( + "gossipsub_late_publish_per_topic_kind", + "Messages published late to gossipsub per topic kind.", + &["topic_kind"] + ); + pub static ref GOSSIP_EXPIRED_LATE_PUBLISH_PER_TOPIC_KIND: Result = + try_create_int_counter_vec( + "gossipsub_expired_late_publish_per_topic_kind", + "Messages that expired waiting to be published on retry to gossipsub per topic kind.", + &["topic_kind"] + ); + pub static ref GOSSIP_FAILED_LATE_PUBLISH_PER_TOPIC_KIND: Result = + try_create_int_counter_vec( + "gossipsub_failed_late_publish_per_topic_kind", + "Messages that failed to be published on retry to gossipsub per topic kind.", + &["topic_kind"] + ); pub static ref PEER_SCORE_DISTRIBUTION: Result = try_create_int_gauge_vec( "peer_score_distribution", "The distribution of connected peer scores", &["position"] ); - pub static ref PEER_SCORE_PER_CLIENT: Result = try_create_float_gauge_vec( "peer_score_per_client", diff --git a/beacon_node/lighthouse_network/src/peer_manager/network_behaviour.rs b/beacon_node/lighthouse_network/src/peer_manager/network_behaviour.rs index b787c421cf..d4eef65b0c 100644 --- a/beacon_node/lighthouse_network/src/peer_manager/network_behaviour.rs +++ b/beacon_node/lighthouse_network/src/peer_manager/network_behaviour.rs @@ -110,6 +110,7 @@ impl NetworkBehaviour for PeerManager { _connection_id: &ConnectionId, endpoint: &ConnectedPoint, _failed_addresses: Option<&Vec>, + _other_established: usize, ) { debug!(self.log, "Connection established"; "peer_id" => %peer_id, "connection" => ?endpoint.to_endpoint()); // Check NAT if metrics are enabled @@ -161,7 +162,7 @@ impl NetworkBehaviour for PeerManager { self.events .push(PeerManagerEvent::PeerConnectedIncoming(*peer_id)); } - ConnectedPoint::Dialer { address } => { + ConnectedPoint::Dialer { address, .. 
} => { self.inject_connect_outgoing(peer_id, address.clone(), None); self.events .push(PeerManagerEvent::PeerConnectedOutgoing(*peer_id)); @@ -172,8 +173,18 @@ impl NetworkBehaviour for PeerManager { self.update_connected_peer_metrics(); metrics::inc_counter(&metrics::PEER_CONNECT_EVENT_COUNT); } + fn inject_connection_closed( + &mut self, + peer_id: &PeerId, + _: &ConnectionId, + _: &ConnectedPoint, + _: DummyProtocolsHandler, + remaining_established: usize, + ) { + if remaining_established > 0 { + return; + } - fn inject_disconnected(&mut self, peer_id: &PeerId) { // There are no more connections if self .network_globals diff --git a/beacon_node/lighthouse_network/src/rpc/mod.rs b/beacon_node/lighthouse_network/src/rpc/mod.rs index ebd6240616..8c4a0b14c7 100644 --- a/beacon_node/lighthouse_network/src/rpc/mod.rs +++ b/beacon_node/lighthouse_network/src/rpc/mod.rs @@ -202,36 +202,25 @@ where } // Use connection established/closed instead of these currently - fn inject_connected(&mut self, peer_id: &PeerId) { - // find the peer's meta-data - debug!(self.log, "Requesting new peer's metadata"; "peer_id" => %peer_id); - let rpc_event = - RPCSend::Request(RequestId::Behaviour, OutboundRequest::MetaData(PhantomData)); - self.events.push(NetworkBehaviourAction::NotifyHandler { - peer_id: *peer_id, - handler: NotifyHandler::Any, - event: rpc_event, - }); - } - - fn inject_disconnected(&mut self, _peer_id: &PeerId) {} - fn inject_connection_established( &mut self, - _peer_id: &PeerId, + peer_id: &PeerId, _connection_id: &ConnectionId, _endpoint: &ConnectedPoint, _failed_addresses: Option<&Vec>, + other_established: usize, ) { - } - - fn inject_connection_closed( - &mut self, - _peer_id: &PeerId, - _: &ConnectionId, - _connected_point: &ConnectedPoint, - _handler: Self::ProtocolsHandler, - ) { + if other_established == 0 { + // find the peer's meta-data + debug!(self.log, "Requesting new peer's metadata"; "peer_id" => %peer_id); + let rpc_event = + 
RPCSend::Request(RequestId::Behaviour, OutboundRequest::MetaData(PhantomData)); + self.events.push(NetworkBehaviourAction::NotifyHandler { + peer_id: *peer_id, + handler: NotifyHandler::Any, + event: rpc_event, + }); + } } fn inject_event( diff --git a/beacon_node/lighthouse_network/src/service.rs b/beacon_node/lighthouse_network/src/service.rs index 0ccdd28fdf..7dcee51870 100644 --- a/beacon_node/lighthouse_network/src/service.rs +++ b/beacon_node/lighthouse_network/src/service.rs @@ -21,7 +21,7 @@ use libp2p::{ swarm::{SwarmBuilder, SwarmEvent}, PeerId, Swarm, Transport, }; -use open_metrics_client::registry::Registry; +use prometheus_client::registry::Registry; use slog::{crit, debug, info, o, trace, warn, Logger}; use ssz::Decode; use std::fs::File; diff --git a/beacon_node/lighthouse_network/src/types/topics.rs b/beacon_node/lighthouse_network/src/types/topics.rs index f9860a003f..3dd7ad8470 100644 --- a/beacon_node/lighthouse_network/src/types/topics.rs +++ b/beacon_node/lighthouse_network/src/types/topics.rs @@ -159,6 +159,14 @@ impl GossipTopic { Err(format!("Unknown topic: {}", topic)) } + + pub fn subnet_id(&self) -> Option { + match self.kind() { + GossipKind::Attestation(subnet_id) => Some(Subnet::Attestation(*subnet_id)), + GossipKind::SyncCommitteeMessage(subnet_id) => Some(Subnet::SyncCommittee(*subnet_id)), + _ => None, + } + } } impl From for Topic { @@ -237,12 +245,7 @@ impl From for GossipKind { /// Get subnet id from an attestation subnet topic hash. pub fn subnet_from_topic_hash(topic_hash: &TopicHash) -> Option { - let gossip_topic = GossipTopic::decode(topic_hash.as_str()).ok()?; - match gossip_topic.kind() { - GossipKind::Attestation(subnet_id) => Some(Subnet::Attestation(*subnet_id)), - GossipKind::SyncCommitteeMessage(subnet_id) => Some(Subnet::SyncCommittee(*subnet_id)), - _ => None, - } + GossipTopic::decode(topic_hash.as_str()).ok()?.subnet_id() } // Determines if a string is an attestation or sync committee topic. 
diff --git a/beacon_node/lighthouse_network/tests/common/behaviour.rs b/beacon_node/lighthouse_network/tests/common/behaviour.rs index ab4ae901f2..75797e63d1 100644 --- a/beacon_node/lighthouse_network/tests/common/behaviour.rs +++ b/beacon_node/lighthouse_network/tests/common/behaviour.rs @@ -100,10 +100,8 @@ where inner: TInner, pub addresses_of_peer: Vec, - pub inject_connected: Vec, - pub inject_disconnected: Vec, - pub inject_connection_established: Vec<(PeerId, ConnectionId, ConnectedPoint)>, - pub inject_connection_closed: Vec<(PeerId, ConnectionId, ConnectedPoint)>, + pub inject_connection_established: Vec<(PeerId, ConnectionId, ConnectedPoint, usize)>, + pub inject_connection_closed: Vec<(PeerId, ConnectionId, ConnectedPoint, usize)>, pub inject_event: Vec<( PeerId, ConnectionId, @@ -128,8 +126,6 @@ where Self { inner, addresses_of_peer: Vec::new(), - inject_connected: Vec::new(), - inject_disconnected: Vec::new(), inject_connection_established: Vec::new(), inject_connection_closed: Vec::new(), inject_event: Vec::new(), @@ -148,8 +144,6 @@ where #[allow(dead_code)] pub fn reset(&mut self) { self.addresses_of_peer = Vec::new(); - self.inject_connected = Vec::new(); - self.inject_disconnected = Vec::new(); self.inject_connection_established = Vec::new(); self.inject_connection_closed = Vec::new(); self.inject_event = Vec::new(); @@ -176,7 +170,13 @@ where expected_disconnections: usize, ) -> bool { if self.inject_connection_closed.len() == expected_closed_connections { - assert_eq!(self.inject_disconnected.len(), expected_disconnections); + assert_eq!( + self.inject_connection_closed + .iter() + .filter(|(.., remaining_established)| { *remaining_established == 0 }) + .count(), + expected_disconnections + ); return true; } @@ -193,7 +193,15 @@ where expected_connections: usize, ) -> bool { if self.inject_connection_established.len() == expected_established_connections { - assert_eq!(self.inject_connected.len(), expected_connections); + assert_eq!( + 
self.inject_connection_established + .iter() + .filter(|(.., reported_aditional_connections)| { + *reported_aditional_connections == 0 + }) + .count(), + expected_connections + ); return true; } @@ -219,37 +227,45 @@ where self.inner.addresses_of_peer(p) } - fn inject_connected(&mut self, peer: &PeerId) { - assert!( - self.inject_connection_established - .iter() - .any(|(peer_id, _, _)| peer_id == peer), - "`inject_connected` is called after at least one `inject_connection_established`." - ); - self.inject_connected.push(*peer); - self.inner.inject_connected(peer); - } - fn inject_connection_established( &mut self, p: &PeerId, c: &ConnectionId, e: &ConnectedPoint, errors: Option<&Vec>, + other_established: usize, ) { - self.inject_connection_established.push((*p, *c, e.clone())); - self.inner.inject_connection_established(p, c, e, errors); - } + let mut other_peer_connections = self + .inject_connection_established + .iter() + .rev() // take last to first + .filter_map(|(peer, .., other_established)| { + if p == peer { + Some(other_established) + } else { + None + } + }) + .take(other_established); - fn inject_disconnected(&mut self, peer: &PeerId) { - assert!( - self.inject_connection_closed - .iter() - .any(|(peer_id, _, _)| peer_id == peer), - "`inject_disconnected` is called after at least one `inject_connection_closed`." - ); - self.inject_disconnected.push(*peer); - self.inner.inject_disconnected(peer); + // We are informed that there are `other_established` additional connections. 
Ensure that the + // number of previous connections is consistent with this + if let Some(&prev) = other_peer_connections.next() { + if prev < other_established { + assert_eq!( + prev, + other_established - 1, + "Inconsistent connection reporting" + ) + } + assert_eq!(other_peer_connections.count(), other_established - 1); + } else { + assert_eq!(other_established, 0) + } + self.inject_connection_established + .push((*p, *c, e.clone(), other_established)); + self.inner + .inject_connection_established(p, c, e, errors, other_established); } fn inject_connection_closed( @@ -258,15 +274,46 @@ where c: &ConnectionId, e: &ConnectedPoint, handler: ::Handler, + remaining_established: usize, ) { - let connection = (*p, *c, e.clone()); + let mut other_closed_connections = self + .inject_connection_established + .iter() + .rev() // take last to first + .filter_map(|(peer, .., remaining_established)| { + if p == peer { + Some(remaining_established) + } else { + None + } + }) + .take(remaining_established); + + // We are informed that there are `other_established` additional connections. Ensure that the + // number of previous connections is consistent with this + if let Some(&prev) = other_closed_connections.next() { + if prev < remaining_established { + assert_eq!( + prev, + remaining_established - 1, + "Inconsistent closed connection reporting" + ) + } + assert_eq!(other_closed_connections.count(), remaining_established - 1); + } else { + assert_eq!(remaining_established, 0) + } assert!( - self.inject_connection_established.contains(&connection), + self.inject_connection_established + .iter() + .any(|(peer, conn_id, endpoint, _)| (peer, conn_id, endpoint) == (p, c, e)), "`inject_connection_closed` is called only for connections for \ which `inject_connection_established` was called first." 
); - self.inject_connection_closed.push(connection); - self.inner.inject_connection_closed(p, c, e, handler); + self.inject_connection_closed + .push((*p, *c, e.clone(), remaining_established)); + self.inner + .inject_connection_closed(p, c, e, handler, remaining_established); } fn inject_event( @@ -278,14 +325,14 @@ where assert!( self.inject_connection_established .iter() - .any(|(peer_id, conn_id, _)| *peer_id == p && c == *conn_id), + .any(|(peer_id, conn_id, ..)| *peer_id == p && c == *conn_id), "`inject_event` is called for reported connections." ); assert!( !self .inject_connection_closed .iter() - .any(|(peer_id, conn_id, _)| *peer_id == p && c == *conn_id), + .any(|(peer_id, conn_id, ..)| *peer_id == p && c == *conn_id), "`inject_event` is never called for closed connections." ); diff --git a/beacon_node/network/src/service.rs b/beacon_node/network/src/service.rs index c6f68d5faa..a16c2c677c 100644 --- a/beacon_node/network/src/service.rs +++ b/beacon_node/network/src/service.rs @@ -11,7 +11,7 @@ use futures::channel::mpsc::Sender; use futures::future::OptionFuture; use futures::prelude::*; use lighthouse_network::{ - open_metrics_client::registry::Registry, MessageAcceptance, Service as LibP2PService, + prometheus_client::registry::Registry, MessageAcceptance, Service as LibP2PService, }; use lighthouse_network::{ rpc::{GoodbyeReason, RPCResponseErrorCode, RequestId}, diff --git a/beacon_node/src/cli.rs b/beacon_node/src/cli.rs index 4c2960c9d6..5150ab492b 100644 --- a/beacon_node/src/cli.rs +++ b/beacon_node/src/cli.rs @@ -108,7 +108,7 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> { Arg::with_name("network-load") .long("network-load") .value_name("INTEGER") - .help("Lighthouse's network can be tuned for bandwidth/performance. Setting this to a high value, will increase the bandwidth lighthouse uses, increasing the likelihood of redundant information in exchange for faster communication. 
This can increase profit of validators marginally by receiving messages faster on the network. Lower values decrease bandwidth usage, but makes communication slower which can lead to validator performance reduction. Values are in the range [1,5].") + .help("Lighthouse's network can be tuned for bandwidth/performance. Setting this to a high value, will increase the bandwidth lighthouse uses, increasing the likelihood of redundant information in exchange for faster communication. This can increase profit of validators marginally by receiving messages faster on the network. Lower values decrease bandwidth usage, but makes communication slower which can lead to validator performance reduction. Values are in the range [1,5].") .default_value("3") .set(clap::ArgSettings::Hidden) .takes_value(true), @@ -409,9 +409,9 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> { .takes_value(true) ) .arg( - Arg::with_name("fee-recipient") - .long("fee-recipient") - .value_name("FEE-RECIPIENT") + Arg::with_name("suggested-fee-recipient") + .long("suggested-fee-recipient") + .value_name("SUGGESTED-FEE-RECIPIENT") .help("Once the merge has happened, this address will receive transaction fees \ collected from any blocks produced by this node. Defaults to a junk \ address whilst the merge is in development stages. THE DEFAULT VALUE \ diff --git a/beacon_node/src/config.rs b/beacon_node/src/config.rs index 2040822931..7487acbde0 100644 --- a/beacon_node/src/config.rs +++ b/beacon_node/src/config.rs @@ -14,12 +14,7 @@ use std::net::{IpAddr, Ipv4Addr, ToSocketAddrs}; use std::net::{TcpListener, UdpSocket}; use std::path::{Path, PathBuf}; use std::str::FromStr; -use types::{Address, Checkpoint, Epoch, EthSpec, Hash256, PublicKeyBytes, GRAFFITI_BYTES_LEN}; - -// TODO(merge): remove this default value. It's just there to make life easy during -// early testnets. 
-const DEFAULT_SUGGESTED_FEE_RECIPIENT: [u8; 20] = - [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]; +use types::{Checkpoint, Epoch, EthSpec, Hash256, PublicKeyBytes, GRAFFITI_BYTES_LEN}; /// Gets the fully-initialized global client. /// @@ -253,12 +248,8 @@ pub fn get_config( client_config.execution_endpoints = Some(client_config.eth1.endpoints.clone()); } - client_config.suggested_fee_recipient = Some( - clap_utils::parse_optional(cli_args, "fee-recipient")? - // TODO(merge): remove this default value. It's just there to make life easy during - // early testnets. - .unwrap_or_else(|| Address::from(DEFAULT_SUGGESTED_FEE_RECIPIENT)), - ); + client_config.suggested_fee_recipient = + clap_utils::parse_optional(cli_args, "suggested-fee-recipient")?; if let Some(freezer_dir) = cli_args.value_of("freezer-dir") { client_config.freezer_db_path = Some(PathBuf::from(freezer_dir)); diff --git a/book/src/SUMMARY.md b/book/src/SUMMARY.md index 7552d42306..f719a3a2b1 100644 --- a/book/src/SUMMARY.md +++ b/book/src/SUMMARY.md @@ -11,6 +11,7 @@ * [Raspberry Pi 4](./pi.md) * [Cross-Compiling](./cross-compiling.md) * [Homebrew](./homebrew.md) + * [Update Priorities](./installation-priorities.md) * [Key Management](./key-management.md) * [Create a wallet](./wallet-create.md) * [Create a validator](./validator-create.md) @@ -21,6 +22,7 @@ * [Voluntary Exits](./voluntary-exit.md) * [Validator Monitoring](./validator-monitoring.md) * [Doppelganger Protection](./validator-doppelganger.md) + * [Suggested Fee Recipient](./suggested-fee-recipient.md) * [APIs](./api.md) * [Beacon Node API](./api-bn.md) * [/lighthouse](./api-lighthouse.md) diff --git a/book/src/api-lighthouse.md b/book/src/api-lighthouse.md index 7836ac14a4..77800b5396 100644 --- a/book/src/api-lighthouse.md +++ b/book/src/api-lighthouse.md @@ -409,7 +409,7 @@ The endpoint will return immediately. See the beacon node logs for an indication Manually provide `SignedBeaconBlock`s to backfill the database. 
This is intended for use by Lighthouse developers during testing only. -### `/lighthouse/block_rewards` +### `/lighthouse/analysis/block_rewards` Fetch information about the block rewards paid to proposers for a range of consecutive blocks. @@ -421,7 +421,7 @@ Two query parameters are required: Example: ```bash -curl "http://localhost:5052/lighthouse/block_rewards?start_slot=1&end_slot=32" | jq +curl "http://localhost:5052/lighthouse/analysis/block_rewards?start_slot=1&end_slot=32" | jq ``` ```json @@ -439,7 +439,7 @@ curl "http://localhost:5052/lighthouse/block_rewards?start_slot=1&end_slot=32" | Caveats: -* Presently only attestation rewards are computed. +* Presently only attestation and sync committee rewards are computed. * The output format is verbose and subject to change. Please see [`BlockReward`][block_reward_src] in the source. * For maximum efficiency the `start_slot` should satisfy `start_slot % slots_per_restore_point == 1`. @@ -447,4 +447,4 @@ Caveats: loading a state on a boundary is most efficient. 
[block_reward_src]: -https://github.com/sigp/lighthouse/tree/unstable/common/eth2/src/lighthouse/block_reward.rs \ No newline at end of file +https://github.com/sigp/lighthouse/tree/unstable/common/eth2/src/lighthouse/block_rewards.rs \ No newline at end of file diff --git a/book/src/api-vc-endpoints.md b/book/src/api-vc-endpoints.md index 14d18312e5..1066d5ef3a 100644 --- a/book/src/api-vc-endpoints.md +++ b/book/src/api-vc-endpoints.md @@ -315,7 +315,8 @@ Typical Responses | 200 "enable": true, "description": "validator_one", "deposit_gwei": "32000000000", - "graffiti": "Mr F was here" + "graffiti": "Mr F was here", + "suggested_fee_recipient": "0xa2e334e71511686bcfe38bb3ee1ad8f6babcc03d" }, { "enable": false, @@ -492,6 +493,7 @@ Typical Responses | 200, 400 "enable": true, "description": "validator_one", "graffiti": "Mr F was here", + "suggested_fee_recipient": "0xa2e334e71511686bcfe38bb3ee1ad8f6babcc03d", "voting_public_key": "0xa062f95fee747144d5e511940624bc6546509eeaeae9383257a9c43e7ddc58c17c2bab4ae62053122184c381b90db380", "url": "http://path-to-web3signer.com", "root_certificate_path": "/path/on/vc/filesystem/to/certificate.pem", @@ -503,6 +505,7 @@ Typical Responses | 200, 400 The following fields may be omitted or nullified to obtain default values: - `graffiti` +- `suggested_fee_recipient` - `root_certificate_path` - `request_timeout_ms` diff --git a/book/src/docker.md b/book/src/docker.md index eebbd5dde2..9a0378f091 100644 --- a/book/src/docker.md +++ b/book/src/docker.md @@ -107,10 +107,10 @@ $ docker run lighthouse:local lighthouse --help You can run a Docker beacon node with the following command: ```bash -$ docker run -p 9000:9000 -p 127.0.0.1:5052:5052 -v $HOME/.lighthouse:/root/.lighthouse sigp/lighthouse lighthouse --network mainnet beacon --http --http-address 0.0.0.0 +$ docker run -p 9000:9000/tcp -p 9000:9000/udp -p 127.0.0.1:5052:5052 -v $HOME/.lighthouse:/root/.lighthouse sigp/lighthouse lighthouse --network mainnet beacon --http 
--http-address 0.0.0.0 ``` -> To join the Pyrmont testnet, use `--network pyrmont` instead. +> To join the Prater testnet, use `--network prater` instead. > The `-p` and `-v` and values are described below. @@ -130,18 +130,18 @@ $ docker run -v $HOME/.lighthouse:/root/.lighthouse sigp/lighthouse lighthouse b ### Ports -In order to be a good peer and serve other peers you should expose port `9000`. +In order to be a good peer and serve other peers you should expose port `9000` for both TCP and UDP. Use the `-p` flag to do this: ```bash -$ docker run -p 9000:9000 sigp/lighthouse lighthouse beacon +$ docker run -p 9000:9000/tcp -p 9000:9000/udp sigp/lighthouse lighthouse beacon ``` If you use the `--http` flag you may also want to expose the HTTP port with `-p 127.0.0.1:5052:5052`. ```bash -$ docker run -p 9000:9000 -p 127.0.0.1:5052:5052 sigp/lighthouse lighthouse beacon --http --http-address 0.0.0.0 +$ docker run -p 9000:9000/tcp -p 9000:9000/udp -p 127.0.0.1:5052:5052 sigp/lighthouse lighthouse beacon --http --http-address 0.0.0.0 ``` [docker_hub]: https://hub.docker.com/repository/docker/sigp/lighthouse/ diff --git a/book/src/faq.md b/book/src/faq.md index 419f95dcbd..02a4bfea66 100644 --- a/book/src/faq.md +++ b/book/src/faq.md @@ -124,7 +124,7 @@ testnet configuration settings. Ensure that the network you wish to connect to is correct (the beacon node outputs the network it is connecting to in the initial boot-up log lines). On top of this, ensure that you are not using the same `datadir` as a previous network. I.e if you have been running the -`pyrmont` testnet and are now trying to join a new testnet but using the same +`prater` testnet and are now trying to join a new testnet but using the same `datadir` (the `datadir` is also printed out in the beacon node's logs on boot-up). 
diff --git a/book/src/installation-priorities.md b/book/src/installation-priorities.md
new file mode 100644
index 0000000000..69d871c396
--- /dev/null
+++ b/book/src/installation-priorities.md
@@ -0,0 +1,22 @@
+# Update Priorities
+
+When publishing releases, Lighthouse will include an "Update Priority" section in the release notes. As an example, see the [release notes from v2.1.2](https://github.com/sigp/lighthouse/releases/tag/v2.1.2).
+
+The "Update Priority" section will include a table which may appear like so:
+
+|User Class |Beacon Node | Validator Client|
+--- | --- | ---
+|Staking Users| Medium Priority | Low Priority |
+|Non-Staking Users| Low Priority|---|
+
+To understand this table, the following terms are important:
+
+- *Staking users* are those who use `lighthouse bn` and `lighthouse vc` to stake on the Beacon Chain.
+- *Non-staking users* are those who run a `lighthouse bn` for non-staking purposes (e.g., data analysis or applications).
+- *High priority* updates should be completed as soon as possible (e.g., hours or days).
+- *Medium priority* updates should be completed at the next convenience (e.g., days or a week).
+- *Low priority* updates should be completed in the next routine update cycle (e.g., two weeks).
+
+Therefore, in the table above, staking users should update their BN in the next days or week and
+their VC in the next routine update cycle. Non-staking users should also update their BN in the next
+routine update cycle.
diff --git a/book/src/key-management.md b/book/src/key-management.md
index 5116bd4579..23d11d550c 100644
--- a/book/src/key-management.md
+++ b/book/src/key-management.md
@@ -42,9 +42,9 @@ items,
starting at one easy-to-backup mnemonic and ending with multiple
keypairs. Creating a single validator looks like this:

1.
Create a **wallet** and record the **mnemonic**: - - `lighthouse --network pyrmont account wallet create --name wally --password-file wally.pass` + - `lighthouse --network prater account wallet create --name wally --password-file wally.pass` 1. Create the voting and withdrawal **keystores** for one validator: - - `lighthouse --network pyrmont account validator create --wallet-name wally --wallet-password wally.pass --count 1` + - `lighthouse --network prater account validator create --wallet-name wally --wallet-password wally.pass --count 1` In step (1), we created a wallet in `~/.lighthouse/{network}/wallets` with the name diff --git a/book/src/mainnet-validator.md b/book/src/mainnet-validator.md index bb9a1d2184..c5881c6066 100644 --- a/book/src/mainnet-validator.md +++ b/book/src/mainnet-validator.md @@ -46,7 +46,7 @@ When using Lighthouse, the `--network` flag selects a network. E.g., - `lighthouse` (no flag): Mainnet. - `lighthouse --network mainnet`: Mainnet. -- `lighthouse --network pyrmont`: Pyrmont (testnet). +- `lighthouse --network prater`: Prater (testnet). Using the correct `--network` flag is very important; using the wrong flag can result in penalties, slashings or lost deposits. As a rule of thumb, always diff --git a/book/src/suggested-fee-recipient.md b/book/src/suggested-fee-recipient.md new file mode 100644 index 0000000000..6513495fe4 --- /dev/null +++ b/book/src/suggested-fee-recipient.md @@ -0,0 +1,91 @@ +# Suggested Fee Recipient + +*Note: these documents are not relevant until the Bellatrix (Merge) upgrade has occurred.* + +## Fee recipient trust assumptions + +During post-merge block production, the Beacon Node (BN) will provide a `suggested_fee_recipient` to +the execution node. This is a 20-byte Ethereum address which the EL might choose to set as the +coinbase and the recipient of other fees or rewards. 
+ +There is no guarantee that an execution node will use the `suggested_fee_recipient` to collect fees; +it may use any address it chooses. It is assumed that an honest execution node *will* use the +`suggested_fee_recipient`, but users should note this trust assumption. + +The `suggested_fee_recipient` can be provided to the VC, who will transmit it to the BN. The BN also +has a choice regarding the fee recipient it passes to the execution node, creating another +noteworthy trust assumption. + +To be sure *you* control your fee recipient value, run your own BN and execution node (don't use +third-party services). + +The Lighthouse VC provides three methods for setting the `suggested_fee_recipient` (also known +simply as the "fee recipient") to be passed to the execution layer during block production. The +Lighthouse BN also provides a method for defining this value, should the VC not transmit a value. + +Assuming trustworthy nodes, the priority for the four methods is: + +1. `validator_definitions.yml` +1. `--suggested-fee-recipient-file` +1. `--suggested-fee-recipient` provided to the VC. +1. `--suggested-fee-recipient` provided to the BN. + +Users may configure the fee recipient via `validator_definitions.yml` or via the +`--suggested-fee-recipient-file` flag. The value in `validator_definitions.yml` will always take +precedence. + +### 1. Setting the fee recipient in the `validator_definitions.yml` + +Users can set the fee recipient in `validator_definitions.yml` with the `suggested_fee_recipient` +key. This option is recommended for most users, where each validator has a fixed fee recipient.
+ +Below is an example of the validator_definitions.yml with `suggested_fee_recipient` values: + +``` +--- +- enabled: true + voting_public_key: "0x87a580d31d7bc69069b55f5a01995a610dd391a26dc9e36e81057a17211983a79266800ab8531f21f1083d7d84085007" + type: local_keystore + voting_keystore_path: /home/paul/.lighthouse/validators/0x87a580d31d7bc69069b55f5a01995a610dd391a26dc9e36e81057a17211983a79266800ab8531f21f1083d7d84085007/voting-keystore.json + voting_keystore_password_path: /home/paul/.lighthouse/secrets/0x87a580d31d7bc69069b55f5a01995a610dd391a26dc9e36e81057a17211983a79266800ab8531f21f1083d7d84085007 + suggested_fee_recipient: "0x6cc8dcbca744a6e4ffedb98e1d0df903b10abd21" +- enabled: false + voting_public_key: "0xa5566f9ec3c6e1fdf362634ebec9ef7aceb0e460e5079714808388e5d48f4ae1e12897fed1bea951c17fa389d511e477" + type: local_keystore voting_keystore_path: /home/paul/.lighthouse/validators/0xa5566f9ec3c6e1fdf362634ebec9ef7aceb0e460e5079714808388e5d48f4ae1e12897fed1bea951c17fa389d511e477/voting-keystore.json + voting_keystore_password: myStrongpa55word123&$ + suggested_fee_recipient: "0xa2e334e71511686bcfe38bb3ee1ad8f6babcc03d" +``` + +### 2. Using the "--suggested-fee-recipient-file" flag on the validator client + +Users can specify a file with the `--suggested-fee-recipient-file` flag. This option is useful for dynamically +changing fee recipients. This file is reloaded each time a validator is chosen to propose a block. + +Usage: +`lighthouse vc --suggested-fee-recipient-file fee_recipient.txt` + +The file should contain key value pairs corresponding to validator public keys and their associated +fee recipient. The file can optionally contain a `default` key for the default case. 
+ +The following example sets the default and the values for the validators with pubkeys `0x87a5` and +`0xa556`: + +``` +default: 0x6cc8dcbca744a6e4ffedb98e1d0df903b10abd21 +0x87a580d31d7bc69069b55f5a01995a610dd391a26dc9e36e81057a17211983a79266800ab8531f21f1083d7d84085007: 0x6cc8dcbca744a6e4ffedb98e1d0df903b10abd21 +0xa5566f9ec3c6e1fdf362634ebec9ef7aceb0e460e5079714808388e5d48f4ae1e12897fed1bea951c17fa389d511e477: 0xa2e334e71511686bcfe38bb3ee1ad8f6babcc03d +``` + +Lighthouse will first search for the fee recipient corresponding to the public key of the proposing +validator, if there are no matches for the public key, then it uses the address corresponding to the +default key (if present). + +### 3. Using the "--suggested-fee-recipient" flag on the validator client + +The `--suggested-fee-recipient` can be provided to the VC to act as a default value for all +validators where a `suggested_fee_recipient` is not loaded from another method. + +### 4. Using the "--suggested-fee-recipient" flag on the beacon node + +The `--suggested-fee-recipient` can be provided to the BN to act as a default value when the +validator client does not transmit a `suggested_fee_recipient` to the BN. diff --git a/book/src/testnet-validator.md b/book/src/testnet-validator.md index 22ea418f6a..0bcd58c9ac 100644 --- a/book/src/testnet-validator.md +++ b/book/src/testnet-validator.md @@ -1,7 +1,6 @@ # Become a Testnet Validator [mainnet-validator]: ./mainnet-validator.md -[pyrmont-launchpad]: https://pyrmont.launchpad.ethereum.org/ [prater-launchpad]: https://prater.launchpad.ethereum.org/ Joining an Eth2 testnet is a great way to get familiar with staking in Phase 0. All users should @@ -11,15 +10,12 @@ To join a testnet, you can follow the [Become an Eth2 Mainnet Validator][mainnet instructions but with a few differences: 1. Use the appropriate Eth2 launchpad website: - - [Pyrmont][pyrmont-launchpad] - [Prater][prater-launchpad] 1. 
Instead of `--network mainnet`, use the appropriate network flag: - - `--network pyrmont`: Pyrmont. - `--network prater`: Prater. 1. Use a Goerli Eth1 node instead of a mainnet one: - For Geth, this means using `geth --goerli --http`. 1. Notice that Lighthouse will store its files in a different directory by default: - - `~/.lighthouse/pyrmont`: Pyrmont. - `~/.lighthouse/prater`: Prater. > diff --git a/book/src/validator-create.md b/book/src/validator-create.md index 91af60078a..e7c316a95f 100644 --- a/book/src/validator-create.md +++ b/book/src/validator-create.md @@ -50,8 +50,7 @@ OPTIONS: The GWEI value of the deposit amount. Defaults to the minimum amount required for an active validator (MAX_EFFECTIVE_BALANCE) --network - Name of the Eth2 chain Lighthouse will sync and follow. [default: mainnet] [possible values: medalla, - altona, spadina, pyrmont, mainnet, toledo] + Name of the Eth2 chain Lighthouse will sync and follow. [default: mainnet] [possible values: prater, mainnet] --secrets-dir The path where the validator keystore passwords will be stored. Defaults to ~/.lighthouse/{network}/secrets @@ -75,7 +74,7 @@ The example assumes that the `wally` wallet was generated from the [wallet](./wallet-create.md) example. ```bash -lighthouse --network pyrmont account validator create --wallet-name wally --wallet-password wally.pass --count 1 +lighthouse --network prater account validator create --wallet-name wally --wallet-password wally.pass --count 1 ``` This command will: @@ -86,6 +85,6 @@ This command will: - An encrypted keystore containing the validators voting keypair. - An `eth1_deposit_data.rlp` assuming the default deposit amount (`32 ETH` for most testnets and mainnet) which can be submitted to the deposit - contract for the Pyrmont testnet. Other testnets can be set via the + contract for the Prater testnet. Other testnets can be set via the `--network` CLI param. - Store a password to the validators voting keypair in `~/.lighthouse/{network}/secrets`. 
diff --git a/book/src/voluntary-exit.md b/book/src/voluntary-exit.md index 63f7258fbb..593bc9969b 100644 --- a/book/src/voluntary-exit.md +++ b/book/src/voluntary-exit.md @@ -39,13 +39,13 @@ The exit phrase is the following: -Below is an example for initiating a voluntary exit on the Pyrmont testnet. +Below is an example for initiating a voluntary exit on the Prater testnet. ``` -$ lighthouse --network pyrmont account validator exit --keystore /path/to/keystore --beacon-node http://localhost:5052 +$ lighthouse --network prater account validator exit --keystore /path/to/keystore --beacon-node http://localhost:5052 -Running account manager for pyrmont network -validator-dir path: ~/.lighthouse/pyrmont/validators +Running account manager for Prater network +validator-dir path: ~/.lighthouse/prater/validators Enter the keystore password for validator in 0xabcd diff --git a/book/src/wallet-create.md b/book/src/wallet-create.md index 3445af4d49..17cac248b9 100644 --- a/book/src/wallet-create.md +++ b/book/src/wallet-create.md @@ -59,11 +59,11 @@ OPTIONS: ## Example -Creates a new wallet named `wally` and saves it in `~/.lighthouse/pyrmont/wallets` with a randomly generated password saved +Creates a new wallet named `wally` and saves it in `~/.lighthouse/prater/wallets` with a randomly generated password saved to `./wallet.pass`: ```bash -lighthouse --network pyrmont account wallet create --name wally --password-file wally.pass +lighthouse --network prater account wallet create --name wally --password-file wally.pass ``` > Notes: diff --git a/boot_node/Cargo.toml b/boot_node/Cargo.toml index 32b7b19511..1d86da52e5 100644 --- a/boot_node/Cargo.toml +++ b/boot_node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "boot_node" -version = "2.1.2" +version = "2.1.3" authors = ["Sigma Prime "] edition = "2018" diff --git a/common/account_utils/src/validator_definitions.rs b/common/account_utils/src/validator_definitions.rs index d66683bee0..6a3f5a6099 100644 --- 
a/common/account_utils/src/validator_definitions.rs +++ b/common/account_utils/src/validator_definitions.rs @@ -13,7 +13,7 @@ use std::collections::HashSet; use std::fs::{self, OpenOptions}; use std::io; use std::path::{Path, PathBuf}; -use types::{graffiti::GraffitiString, PublicKey}; +use types::{graffiti::GraffitiString, Address, PublicKey}; use validator_dir::VOTING_KEYSTORE_FILE; /// The file name for the serialized `ValidatorDefinitions` struct. @@ -93,6 +93,9 @@ pub struct ValidatorDefinition { #[serde(skip_serializing_if = "Option::is_none")] pub graffiti: Option, #[serde(default)] + #[serde(skip_serializing_if = "Option::is_none")] + pub suggested_fee_recipient: Option
, + #[serde(default)] pub description: String, #[serde(flatten)] pub signing_definition: SigningDefinition, @@ -109,6 +112,7 @@ impl ValidatorDefinition { voting_keystore_path: P, voting_keystore_password: Option, graffiti: Option, + suggested_fee_recipient: Option
, ) -> Result { let voting_keystore_path = voting_keystore_path.as_ref().into(); let keystore = @@ -120,6 +124,7 @@ impl ValidatorDefinition { voting_public_key, description: keystore.description().unwrap_or("").to_string(), graffiti, + suggested_fee_recipient, signing_definition: SigningDefinition::LocalKeystore { voting_keystore_path, voting_keystore_password_path: None, @@ -265,6 +270,7 @@ impl ValidatorDefinitions { voting_public_key, description: keystore.description().unwrap_or("").to_string(), graffiti: None, + suggested_fee_recipient: None, signing_definition: SigningDefinition::LocalKeystore { voting_keystore_path, voting_keystore_password_path, @@ -466,4 +472,45 @@ mod tests { Some(GraffitiString::from_str("mrfwashere").unwrap()) ); } + + #[test] + fn suggested_fee_recipient_checks() { + let no_suggested_fee_recipient = r#"--- + description: "" + enabled: true + type: local_keystore + voting_keystore_path: "" + voting_public_key: "0xaf3c7ddab7e293834710fca2d39d068f884455ede270e0d0293dc818e4f2f0f975355067e8437955cb29aec674e5c9e7" + "#; + let def: ValidatorDefinition = serde_yaml::from_str(no_suggested_fee_recipient).unwrap(); + assert!(def.suggested_fee_recipient.is_none()); + + let invalid_suggested_fee_recipient = r#"--- + description: "" + enabled: true + type: local_keystore + suggested_fee_recipient: "foopy" + voting_keystore_path: "" + voting_public_key: "0xaf3c7ddab7e293834710fca2d39d068f884455ede270e0d0293dc818e4f2f0f975355067e8437955cb29aec674e5c9e7" + "#; + + let def: Result = + serde_yaml::from_str(invalid_suggested_fee_recipient); + assert!(def.is_err()); + + let valid_suggested_fee_recipient = r#"--- + description: "" + enabled: true + type: local_keystore + suggested_fee_recipient: "0xa2e334e71511686bcfe38bb3ee1ad8f6babcc03d" + voting_keystore_path: "" + voting_public_key: "0xaf3c7ddab7e293834710fca2d39d068f884455ede270e0d0293dc818e4f2f0f975355067e8437955cb29aec674e5c9e7" + "#; + + let def: ValidatorDefinition = 
serde_yaml::from_str(valid_suggested_fee_recipient).unwrap(); + assert_eq!( + def.suggested_fee_recipient, + Some(Address::from_str("0xa2e334e71511686bcfe38bb3ee1ad8f6babcc03d").unwrap()) + ); + } } diff --git a/common/eth2/src/lib.rs b/common/eth2/src/lib.rs index 608a2c9e22..e57faa4fe2 100644 --- a/common/eth2/src/lib.rs +++ b/common/eth2/src/lib.rs @@ -892,6 +892,23 @@ impl BeaconNodeHttpClient { Ok(()) } + /// `POST validator/prepare_beacon_proposer` + pub async fn post_validator_prepare_beacon_proposer( + &self, + preparation_data: &[ProposerPreparationData], + ) -> Result<(), Error> { + let mut path = self.eth_path(V1)?; + + path.path_segments_mut() + .map_err(|()| Error::InvalidUrl(self.server.clone()))? + .push("validator") + .push("prepare_beacon_proposer"); + + self.post(path, &preparation_data).await?; + + Ok(()) + } + /// `GET config/fork_schedule` pub async fn get_config_fork_schedule(&self) -> Result>, Error> { let mut path = self.eth_path(V1)?; diff --git a/common/eth2/src/lighthouse_vc/types.rs b/common/eth2/src/lighthouse_vc/types.rs index 25b3050538..9bf7546749 100644 --- a/common/eth2/src/lighthouse_vc/types.rs +++ b/common/eth2/src/lighthouse_vc/types.rs @@ -23,6 +23,9 @@ pub struct ValidatorRequest { #[serde(default)] #[serde(skip_serializing_if = "Option::is_none")] pub graffiti: Option, + #[serde(default)] + #[serde(skip_serializing_if = "Option::is_none")] + pub suggested_fee_recipient: Option
, #[serde(with = "eth2_serde_utils::quoted_u64")] pub deposit_gwei: u64, } @@ -43,6 +46,9 @@ pub struct CreatedValidator { #[serde(default)] #[serde(skip_serializing_if = "Option::is_none")] pub graffiti: Option, + #[serde(default)] + #[serde(skip_serializing_if = "Option::is_none")] + pub suggested_fee_recipient: Option
, pub eth1_deposit_tx_data: String, #[serde(with = "eth2_serde_utils::quoted_u64")] pub deposit_gwei: u64, @@ -65,6 +71,7 @@ pub struct KeystoreValidatorsPostRequest { pub enable: bool, pub keystore: Keystore, pub graffiti: Option, + pub suggested_fee_recipient: Option
, } #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] @@ -74,6 +81,9 @@ pub struct Web3SignerValidatorRequest { #[serde(default)] #[serde(skip_serializing_if = "Option::is_none")] pub graffiti: Option, + #[serde(default)] + #[serde(skip_serializing_if = "Option::is_none")] + pub suggested_fee_recipient: Option
, pub voting_public_key: PublicKey, pub url: String, #[serde(default)] diff --git a/common/eth2_config/src/lib.rs b/common/eth2_config/src/lib.rs index fafa15ef8d..7c1f5810fd 100644 --- a/common/eth2_config/src/lib.rs +++ b/common/eth2_config/src/lib.rs @@ -235,7 +235,6 @@ macro_rules! define_hardcoded_nets { // The directory containing the testnet files should match the human-friendly name (element 1). define_hardcoded_nets!( (mainnet, "mainnet", GENESIS_STATE_IS_KNOWN), - (pyrmont, "pyrmont", GENESIS_STATE_IS_KNOWN), (prater, "prater", GENESIS_STATE_IS_KNOWN), (gnosis, "gnosis", GENESIS_STATE_IS_KNOWN) ); diff --git a/common/eth2_network_config/built_in_network_configs/pyrmont/boot_enr.yaml b/common/eth2_network_config/built_in_network_configs/pyrmont/boot_enr.yaml deleted file mode 100644 index 1461179b95..0000000000 --- a/common/eth2_network_config/built_in_network_configs/pyrmont/boot_enr.yaml +++ /dev/null @@ -1,8 +0,0 @@ -# @protolambda bootnode 1 -- enr:-Ku4QOA5OGWObY8ep_x35NlGBEj7IuQULTjkgxC_0G1AszqGEA0Wn2RNlyLFx9zGTNB1gdFBA6ZDYxCgIza1uJUUOj4Dh2F0dG5ldHOIAAAAAAAAAACEZXRoMpDVTPWXAAAgCf__________gmlkgnY0gmlwhDQPSjiJc2VjcDI1NmsxoQM6yTQB6XGWYJbI7NZFBjp4Yb9AYKQPBhVrfUclQUobb4N1ZHCCIyg -# @protolambda bootnode 2 -- enr:-Ku4QOksdA2tabOGrfOOr6NynThMoio6Ggka2oDPqUuFeWCqcRM2alNb8778O_5bK95p3EFt0cngTUXm2H7o1jkSJ_8Dh2F0dG5ldHOIAAAAAAAAAACEZXRoMpDVTPWXAAAgCf__________gmlkgnY0gmlwhDaa13aJc2VjcDI1NmsxoQKdNQJvnohpf0VO0ZYCAJxGjT0uwJoAHbAiBMujGjK0SoN1ZHCCIyg -# lighthouse bootnode 1 -- enr:-LK4QDiPGwNomqUqNDaM3iHYvtdX7M5qngson6Qb2xGIg1LwC8-Nic0aQwO0rVbJt5xp32sRE3S1YqvVrWO7OgVNv0kBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpA7CIeVAAAgCf__________gmlkgnY0gmlwhBKNA4qJc2VjcDI1NmsxoQKbBS4ROQ_sldJm5tMgi36qm5I5exKJFb4C8dDVS_otAoN0Y3CCIyiDdWRwgiMo -# lighthouse bootnode 2 -- 
enr:-LK4QKAezYUw_R4P1vkzfw9qMQQFJvRQy3QsUblWxIZ4FSduJ2Kueik-qY5KddcVTUsZiEO-oZq0LwbaSxdYf27EjckBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpA7CIeVAAAgCf__________gmlkgnY0gmlwhCOmkIaJc2VjcDI1NmsxoQOQgTD4a8-rESfTdbCG0V6Yz1pUvze02jB2Py3vzGWhG4N0Y3CCIyiDdWRwgiMo diff --git a/common/eth2_network_config/built_in_network_configs/pyrmont/config.yaml b/common/eth2_network_config/built_in_network_configs/pyrmont/config.yaml deleted file mode 100644 index 913671c2be..0000000000 --- a/common/eth2_network_config/built_in_network_configs/pyrmont/config.yaml +++ /dev/null @@ -1,85 +0,0 @@ -# Pyrmont config - -# Extends the mainnet preset -PRESET_BASE: 'mainnet' - -# Transition -# --------------------------------------------------------------- -# TBD, 2**256-2**10 is a placeholder -TERMINAL_TOTAL_DIFFICULTY: 115792089237316195423570985008687907853269984665640564039457584007913129638912 -# By default, don't use these params -TERMINAL_BLOCK_HASH: 0x0000000000000000000000000000000000000000000000000000000000000000 -TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH: 18446744073709551615 - -# Genesis -# --------------------------------------------------------------- -# `2**14` (= 16,384) -MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 16384 -# Nov 18, 2020, 12pm UTC -MIN_GENESIS_TIME: 1605700800 -# Pyrmont area code -GENESIS_FORK_VERSION: 0x00002009 -# Customized for Pyrmont: 432000 seconds (5 days) -GENESIS_DELAY: 432000 - - -# Forking -# --------------------------------------------------------------- -# Some forks are disabled for now: -# - These may be re-assigned to another fork-version later -# - Temporarily set to max uint64 value: 2**64 - 1 - -# Altair -ALTAIR_FORK_VERSION: 0x01002009 -ALTAIR_FORK_EPOCH: 61650 -# Merge -BELLATRIX_FORK_VERSION: 0x02002009 -BELLATRIX_FORK_EPOCH: 18446744073709551615 -# Sharding -SHARDING_FORK_VERSION: 0x03002009 -SHARDING_FORK_EPOCH: 18446744073709551615 - -# TBD, 2**32 is a placeholder. Merge transition approach is in active R&D. 
-TRANSITION_TOTAL_DIFFICULTY: 4294967296 - - -# Time parameters -# --------------------------------------------------------------- -# 12 seconds -SECONDS_PER_SLOT: 12 -# 14 (estimate from Eth1 mainnet) -SECONDS_PER_ETH1_BLOCK: 14 -# 2**8 (= 256) epochs ~27 hours -MIN_VALIDATOR_WITHDRAWABILITY_DELAY: 256 -# 2**8 (= 256) epochs ~27 hours -SHARD_COMMITTEE_PERIOD: 256 -# 2**11 (= 2,048) Eth1 blocks ~8 hours -ETH1_FOLLOW_DISTANCE: 2048 - - -# Validator cycle -# --------------------------------------------------------------- -# 2**2 (= 4) -INACTIVITY_SCORE_BIAS: 4 -# 2**4 (= 16) -INACTIVITY_SCORE_RECOVERY_RATE: 16 -# 2**4 * 10**9 (= 16,000,000,000) Gwei -EJECTION_BALANCE: 16000000000 -# 2**2 (= 4) -MIN_PER_EPOCH_CHURN_LIMIT: 4 -# 2**16 (= 65,536) -CHURN_LIMIT_QUOTIENT: 65536 - - -# Fork choice -# --------------------------------------------------------------- -# 70% -PROPOSER_SCORE_BOOST: 70 - -# Deposit contract -# --------------------------------------------------------------- -# Ethereum Goerli testnet -DEPOSIT_CHAIN_ID: 5 -DEPOSIT_NETWORK_ID: 5 -# Pyrmont test deposit contract on Goerli (2nd edition, 0x00002009 fork version) -DEPOSIT_CONTRACT_ADDRESS: 0x8c5fecdC472E27Bc447696F431E425D02dd46a8c diff --git a/common/eth2_network_config/built_in_network_configs/pyrmont/deploy_block.txt b/common/eth2_network_config/built_in_network_configs/pyrmont/deploy_block.txt deleted file mode 100644 index 29d19b8ee4..0000000000 --- a/common/eth2_network_config/built_in_network_configs/pyrmont/deploy_block.txt +++ /dev/null @@ -1 +0,0 @@ -3743587 diff --git a/common/eth2_network_config/built_in_network_configs/pyrmont/genesis.ssz.zip b/common/eth2_network_config/built_in_network_configs/pyrmont/genesis.ssz.zip deleted file mode 100644 index 3b76c021a2..0000000000 Binary files a/common/eth2_network_config/built_in_network_configs/pyrmont/genesis.ssz.zip and /dev/null differ diff --git a/common/eth2_network_config/src/lib.rs b/common/eth2_network_config/src/lib.rs index 
044548bd61..fa8e1a3dd1 100644 --- a/common/eth2_network_config/src/lib.rs +++ b/common/eth2_network_config/src/lib.rs @@ -25,7 +25,7 @@ pub const BASE_CONFIG_FILE: &str = "config.yaml"; // Creates definitions for: // -// - Each of the `HardcodedNet` values (e.g., `MAINNET`, `PYRMONT`, etc). +// - Each of the `HardcodedNet` values (e.g., `MAINNET`, `PRATER`, etc). // - `HARDCODED_NETS: &[HardcodedNet]` // - `HARDCODED_NET_NAMES: &[&'static str]` instantiate_hardcoded_nets!(eth2_config); diff --git a/common/lighthouse_version/src/lib.rs b/common/lighthouse_version/src/lib.rs index 3c6b2459ec..5bf4e547e8 100644 --- a/common/lighthouse_version/src/lib.rs +++ b/common/lighthouse_version/src/lib.rs @@ -16,7 +16,7 @@ pub const VERSION: &str = git_version!( // NOTE: using --match instead of --exclude for compatibility with old Git "--match=thiswillnevermatchlol" ], - prefix = "Lighthouse/v2.1.2-", + prefix = "Lighthouse/v2.1.3-", fallback = "unknown" ); diff --git a/consensus/types/src/lib.rs b/consensus/types/src/lib.rs index ef0fcf804c..79efac360e 100644 --- a/consensus/types/src/lib.rs +++ b/consensus/types/src/lib.rs @@ -47,6 +47,7 @@ pub mod graffiti; pub mod historical_batch; pub mod indexed_attestation; pub mod pending_attestation; +pub mod proposer_preparation_data; pub mod proposer_slashing; pub mod relative_epoch; pub mod selection_proof; @@ -126,6 +127,7 @@ pub use crate::participation_flags::ParticipationFlags; pub use crate::participation_list::ParticipationList; pub use crate::pending_attestation::PendingAttestation; pub use crate::preset::{AltairPreset, BasePreset, BellatrixPreset}; +pub use crate::proposer_preparation_data::ProposerPreparationData; pub use crate::proposer_slashing::ProposerSlashing; pub use crate::relative_epoch::{Error as RelativeEpochError, RelativeEpoch}; pub use crate::selection_proof::SelectionProof; diff --git a/consensus/types/src/proposer_preparation_data.rs b/consensus/types/src/proposer_preparation_data.rs new file mode 100644 
index 0000000000..f2ea967114 --- /dev/null +++ b/consensus/types/src/proposer_preparation_data.rs @@ -0,0 +1,12 @@ +use crate::*; +use serde::{Deserialize, Serialize}; + +/// A proposer preparation, created when a validator prepares the beacon node for potential proposers +/// by supplying information required when proposing blocks for the given validators. +#[derive(PartialEq, Debug, Serialize, Deserialize, Clone)] +pub struct ProposerPreparationData { + /// The validators index. + pub validator_index: u64, + /// The fee-recipient address. + pub fee_recipient: Address, +} diff --git a/lcli/Cargo.toml b/lcli/Cargo.toml index 2c734a0639..8c5fa7ae02 100644 --- a/lcli/Cargo.toml +++ b/lcli/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lcli" description = "Lighthouse CLI (modeled after zcli)" -version = "2.1.2" +version = "2.1.3" authors = ["Paul Hauner "] edition = "2018" diff --git a/lighthouse/Cargo.toml b/lighthouse/Cargo.toml index 0ec85ec5ae..9cdcaea675 100644 --- a/lighthouse/Cargo.toml +++ b/lighthouse/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lighthouse" -version = "2.1.2" +version = "2.1.3" authors = ["Sigma Prime "] edition = "2018" autotests = false diff --git a/lighthouse/tests/account_manager.rs b/lighthouse/tests/account_manager.rs index 96be44fcad..fcc1d2aee2 100644 --- a/lighthouse/tests/account_manager.rs +++ b/lighthouse/tests/account_manager.rs @@ -493,6 +493,7 @@ fn validator_import_launchpad() { enabled: false, description: "".into(), graffiti: None, + suggested_fee_recipient: None, voting_public_key: keystore.public_key().unwrap(), signing_definition: SigningDefinition::LocalKeystore { voting_keystore_path, @@ -612,6 +613,7 @@ fn validator_import_launchpad_no_password_then_add_password() { enabled: true, description: "".into(), graffiti: None, + suggested_fee_recipient: None, voting_public_key: keystore.public_key().unwrap(), signing_definition: SigningDefinition::LocalKeystore { voting_keystore_path, @@ -635,6 +637,7 @@ fn 
validator_import_launchpad_no_password_then_add_password() { enabled: true, description: "".into(), graffiti: None, + suggested_fee_recipient: None, voting_public_key: keystore.public_key().unwrap(), signing_definition: SigningDefinition::LocalKeystore { voting_keystore_path: dst_keystore_dir.join(KEYSTORE_NAME), @@ -734,6 +737,7 @@ fn validator_import_launchpad_password_file() { description: "".into(), voting_public_key: keystore.public_key().unwrap(), graffiti: None, + suggested_fee_recipient: None, signing_definition: SigningDefinition::LocalKeystore { voting_keystore_path, voting_keystore_password_path: None, diff --git a/lighthouse/tests/beacon_node.rs b/lighthouse/tests/beacon_node.rs index 6d03cafe10..f630ed8e73 100644 --- a/lighthouse/tests/beacon_node.rs +++ b/lighthouse/tests/beacon_node.rs @@ -212,7 +212,7 @@ fn merge_fee_recipient_flag() { CommandLineTest::new() .flag("merge", None) .flag( - "fee-recipient", + "suggested-fee-recipient", Some("0x00000000219ab540356cbb839cbe05303d7705fa"), ) .run_with_zero_port() diff --git a/lighthouse/tests/validator_client.rs b/lighthouse/tests/validator_client.rs index eacc57d95f..e682471c46 100644 --- a/lighthouse/tests/validator_client.rs +++ b/lighthouse/tests/validator_client.rs @@ -7,8 +7,10 @@ use std::io::Write; use std::net::Ipv4Addr; use std::path::PathBuf; use std::process::Command; +use std::str::FromStr; use std::string::ToString; use tempfile::TempDir; +use types::Address; /// Returns the `lighthouse validator_client` command. fn base_cmd() -> Command { @@ -218,6 +220,83 @@ fn graffiti_file_with_pk_flag() { }); } +// Tests for suggested-fee-recipient flags. 
+#[test] +fn fee_recipient_flag() { + CommandLineTest::new() + .flag( + "suggested-fee-recipient", + Some("0x00000000219ab540356cbb839cbe05303d7705fa"), + ) + .run() + .with_config(|config| { + assert_eq!( + config.fee_recipient, + Some(Address::from_str("0x00000000219ab540356cbb839cbe05303d7705fa").unwrap()) + ) + }); +} +#[test] +fn fee_recipient_file_flag() { + let dir = TempDir::new().expect("Unable to create temporary directory"); + let mut file = + File::create(dir.path().join("fee_recipient.txt")).expect("Unable to create file"); + let new_key = Keypair::random(); + let pubkeybytes = PublicKeyBytes::from(new_key.pk); + let contents = "default:0x00000000219ab540356cbb839cbe05303d7705fa"; + file.write_all(contents.as_bytes()) + .expect("Unable to write to file"); + CommandLineTest::new() + .flag( + "suggested-fee-recipient-file", + dir.path().join("fee_recipient.txt").as_os_str().to_str(), + ) + .run() + .with_config(|config| { + // Public key not present so load default. + assert_eq!( + config + .fee_recipient_file + .clone() + .unwrap() + .load_fee_recipient(&pubkeybytes) + .unwrap(), + Some(Address::from_str("0x00000000219ab540356cbb839cbe05303d7705fa").unwrap()) + ) + }); +} +#[test] +fn fee_recipient_file_with_pk_flag() { + let dir = TempDir::new().expect("Unable to create temporary directory"); + let mut file = + File::create(dir.path().join("fee_recipient.txt")).expect("Unable to create file"); + let new_key = Keypair::random(); + let pubkeybytes = PublicKeyBytes::from(new_key.pk); + let contents = format!( + "{}:0x00000000219ab540356cbb839cbe05303d7705fa", + pubkeybytes.to_string() + ); + file.write_all(contents.as_bytes()) + .expect("Unable to write to file"); + CommandLineTest::new() + .flag( + "suggested-fee-recipient-file", + dir.path().join("fee_recipient.txt").as_os_str().to_str(), + ) + .run() + .with_config(|config| { + assert_eq!( + config + .fee_recipient_file + .clone() + .unwrap() + .load_fee_recipient(&pubkeybytes) + .unwrap(), + 
Some(Address::from_str("0x00000000219ab540356cbb839cbe05303d7705fa").unwrap()) + ) + }); +} + // Tests for HTTP flags. #[test] fn http_flag() { diff --git a/testing/web3signer_tests/src/lib.rs b/testing/web3signer_tests/src/lib.rs index 6d6ee9933e..d73e4a762d 100644 --- a/testing/web3signer_tests/src/lib.rs +++ b/testing/web3signer_tests/src/lib.rs @@ -336,6 +336,7 @@ mod tests { enabled: true, voting_public_key: validator_pubkey.clone(), graffiti: None, + suggested_fee_recipient: None, description: String::default(), signing_definition: SigningDefinition::LocalKeystore { voting_keystore_path: signer_rig.keystore_path.clone(), @@ -351,6 +352,7 @@ mod tests { enabled: true, voting_public_key: validator_pubkey.clone(), graffiti: None, + suggested_fee_recipient: None, description: String::default(), signing_definition: SigningDefinition::Web3Signer { url: signer_rig.url.to_string(), @@ -564,16 +566,6 @@ mod tests { test_altair_types("mainnet", 4243).await } - #[tokio::test] - async fn pyrmont_base_types() { - test_base_types("pyrmont", 4244).await - } - - #[tokio::test] - async fn pyrmont_altair_types() { - test_altair_types("pyrmont", 4245).await - } - #[tokio::test] async fn prater_base_types() { test_base_types("prater", 4246).await diff --git a/validator_client/src/cli.rs b/validator_client/src/cli.rs index 595cbb995a..d16e1e0a1e 100644 --- a/validator_client/src/cli.rs +++ b/validator_client/src/cli.rs @@ -127,6 +127,22 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> { .takes_value(true) .conflicts_with("graffiti") ) + .arg( + Arg::with_name("suggested-fee-recipient") + .long("suggested-fee-recipient") + .help("The fallback address provided to the BN if nothing suitable is found \ + in the validator definitions or fee recipient file.") + .value_name("FEE-RECIPIENT") + .takes_value(true) + ) + .arg( + Arg::with_name("suggested-fee-recipient-file") + .long("suggested-fee-recipient-file") + .help("The fallback address provided to the BN if nothing suitable is found \ + 
in the validator definitions.") + .value_name("FEE-RECIPIENT-FILE") + .takes_value(true) + ) /* REST API related arguments */ .arg( Arg::with_name("http") diff --git a/validator_client/src/config.rs b/validator_client/src/config.rs index 0695012fb3..cb9f80eab5 100644 --- a/validator_client/src/config.rs +++ b/validator_client/src/config.rs @@ -1,3 +1,4 @@ +use crate::fee_recipient_file::FeeRecipientFile; use crate::graffiti_file::GraffitiFile; use crate::{http_api, http_metrics}; use clap::ArgMatches; @@ -13,7 +14,7 @@ use slog::{info, warn, Logger}; use std::fs; use std::net::Ipv4Addr; use std::path::PathBuf; -use types::GRAFFITI_BYTES_LEN; +use types::{Address, GRAFFITI_BYTES_LEN}; pub const DEFAULT_BEACON_NODE: &str = "http://localhost:5052/"; @@ -41,6 +42,10 @@ pub struct Config { pub graffiti: Option, /// Graffiti file to load per validator graffitis. pub graffiti_file: Option, + /// Fallback fee-recipient address. + pub fee_recipient: Option
, + /// Fee recipient file to load per validator suggested-fee-recipients. + pub fee_recipient_file: Option, /// Configuration for the HTTP REST API. pub http_api: http_api::Config, /// Configuration for the HTTP REST API. @@ -79,6 +84,8 @@ impl Default for Config { use_long_timeouts: false, graffiti: None, graffiti_file: None, + fee_recipient: None, + fee_recipient_file: None, http_api: <_>::default(), http_metrics: <_>::default(), monitoring_api: None, @@ -197,6 +204,25 @@ impl Config { } } + if let Some(fee_recipient_file_path) = cli_args.value_of("suggested-fee-recipient-file") { + let mut fee_recipient_file = FeeRecipientFile::new(fee_recipient_file_path.into()); + fee_recipient_file + .read_fee_recipient_file() + .map_err(|e| format!("Error reading suggested-fee-recipient file: {:?}", e))?; + config.fee_recipient_file = Some(fee_recipient_file); + info!( + log, + "Successfully loaded suggested-fee-recipient file"; + "path" => fee_recipient_file_path + ); + } + + if let Some(input_fee_recipient) = + parse_optional::
(cli_args, "suggested-fee-recipient")? + { + config.fee_recipient = Some(input_fee_recipient); + } + if let Some(tls_certs) = parse_optional::(cli_args, "beacon-nodes-tls-certs")? { config.beacon_nodes_tls_certs = Some(tls_certs.split(',').map(PathBuf::from).collect()); } diff --git a/validator_client/src/fee_recipient_file.rs b/validator_client/src/fee_recipient_file.rs new file mode 100644 index 0000000000..637ca6d3d5 --- /dev/null +++ b/validator_client/src/fee_recipient_file.rs @@ -0,0 +1,184 @@ +use serde_derive::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::fs::File; +use std::io::{prelude::*, BufReader}; +use std::path::PathBuf; +use std::str::FromStr; + +use bls::PublicKeyBytes; +use types::Address; + +#[derive(Debug)] +#[allow(clippy::enum_variant_names)] +pub enum Error { + InvalidFile(std::io::Error), + InvalidLine(String), + InvalidPublicKey(String), + InvalidFeeRecipient(String), +} + +/// Struct to load validator fee-recipients from file. +/// The fee-recipient file is expected to have the following structure +/// +/// default: 0x00000000219ab540356cbb839cbe05303d7705fa +/// public_key1: fee-recipient1 +/// public_key2: fee-recipient2 +/// ... +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct FeeRecipientFile { + fee_recipient_path: PathBuf, + fee_recipients: HashMap, + default: Option
, +} + +impl FeeRecipientFile { + pub fn new(fee_recipient_path: PathBuf) -> Self { + Self { + fee_recipient_path, + fee_recipients: HashMap::new(), + default: None, + } + } + + /// Returns the fee-recipient corresponding to the given public key if present, else returns the + /// default fee-recipient. + /// + /// Returns an error if loading from the fee-recipient file fails. + pub fn get_fee_recipient(&self, public_key: &PublicKeyBytes) -> Result, Error> { + Ok(self + .fee_recipients + .get(public_key) + .copied() + .or(self.default)) + } + + /// Loads the fee-recipient file and populates the default fee-recipient and `fee_recipients` hashmap. + /// Returns the fee-recipient corresponding to the given public key if present, else returns the + /// default fee-recipient. + /// + /// Returns an error if loading from the fee-recipient file fails. + pub fn load_fee_recipient( + &mut self, + public_key: &PublicKeyBytes, + ) -> Result, Error> { + self.read_fee_recipient_file()?; + Ok(self + .fee_recipients + .get(public_key) + .copied() + .or(self.default)) + } + + /// Reads from a fee-recipient file with the specified format and populates the default value + /// and the hashmap. + /// + /// Returns an error if the file does not exist, or if the format is invalid. + pub fn read_fee_recipient_file(&mut self) -> Result<(), Error> { + let file = File::open(self.fee_recipient_path.as_path()).map_err(Error::InvalidFile)?; + let reader = BufReader::new(file); + + let lines = reader.lines(); + + self.default = None; + self.fee_recipients.clear(); + + for line in lines { + let line = line.map_err(|e| Error::InvalidLine(e.to_string()))?; + let (pk_opt, fee_recipient) = read_line(&line)?; + match pk_opt { + Some(pk) => { + self.fee_recipients.insert(pk, fee_recipient); + } + None => self.default = Some(fee_recipient), + } + } + Ok(()) + } +} + +/// Parses a line from the fee-recipient file. +/// +/// `Ok((None, fee_recipient))` represents the fee-recipient for the default key. 
+/// `Ok((Some(pk), fee_recipient))` represents fee-recipient for the public key `pk`. +/// Returns an error if the line is in the wrong format or does not contain a valid public key or fee-recipient. +fn read_line(line: &str) -> Result<(Option, Address), Error> { + if let Some(i) = line.find(':') { + let (key, value) = line.split_at(i); + // Note: `value.len() >=1` so `value[1..]` is safe + let fee_recipient = Address::from_str(value[1..].trim()) + .map_err(|e| Error::InvalidFeeRecipient(e.to_string()))?; + if key == "default" { + Ok((None, fee_recipient)) + } else { + let pk = PublicKeyBytes::from_str(key).map_err(Error::InvalidPublicKey)?; + Ok((Some(pk), fee_recipient)) + } + } else { + Err(Error::InvalidLine(format!("Missing delimiter: {}", line))) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use bls::Keypair; + use std::io::LineWriter; + use tempfile::TempDir; + + const DEFAULT_FEE_RECIPIENT: &str = "0x00000000219ab540356cbb839cbe05303d7705fa"; + const CUSTOM_FEE_RECIPIENT1: &str = "0x4242424242424242424242424242424242424242"; + const CUSTOM_FEE_RECIPIENT2: &str = "0x0000000000000000000000000000000000000001"; + const PK1: &str = "0x800012708dc03f611751aad7a43a082142832b5c1aceed07ff9b543cf836381861352aa923c70eeb02018b638aa306aa"; + const PK2: &str = "0x80001866ce324de7d80ec73be15e2d064dcf121adf1b34a0d679f2b9ecbab40ce021e03bb877e1a2fe72eaaf475e6e21"; + + // Create a fee-recipient file in the required format and return a path to the file. 
+ fn create_fee_recipient_file() -> PathBuf { + let temp = TempDir::new().unwrap(); + let pk1 = PublicKeyBytes::deserialize(&hex::decode(&PK1[2..]).unwrap()).unwrap(); + let pk2 = PublicKeyBytes::deserialize(&hex::decode(&PK2[2..]).unwrap()).unwrap(); + + let file_name = temp.into_path().join("fee_recipient.txt"); + + let file = File::create(&file_name).unwrap(); + let mut fee_recipient_file = LineWriter::new(file); + fee_recipient_file + .write_all(format!("default: {}\n", DEFAULT_FEE_RECIPIENT).as_bytes()) + .unwrap(); + fee_recipient_file + .write_all(format!("{}: {}\n", pk1.as_hex_string(), CUSTOM_FEE_RECIPIENT1).as_bytes()) + .unwrap(); + fee_recipient_file + .write_all(format!("{}: {}\n", pk2.as_hex_string(), CUSTOM_FEE_RECIPIENT2).as_bytes()) + .unwrap(); + fee_recipient_file.flush().unwrap(); + file_name + } + + #[test] + fn test_load_fee_recipient() { + let fee_recipient_file_path = create_fee_recipient_file(); + let mut gf = FeeRecipientFile::new(fee_recipient_file_path); + + let pk1 = PublicKeyBytes::deserialize(&hex::decode(&PK1[2..]).unwrap()).unwrap(); + let pk2 = PublicKeyBytes::deserialize(&hex::decode(&PK2[2..]).unwrap()).unwrap(); + + // Read once + gf.read_fee_recipient_file().unwrap(); + + assert_eq!( + gf.load_fee_recipient(&pk1).unwrap().unwrap(), + Address::from_str(CUSTOM_FEE_RECIPIENT1).unwrap() + ); + assert_eq!( + gf.load_fee_recipient(&pk2).unwrap().unwrap(), + Address::from_str(CUSTOM_FEE_RECIPIENT2).unwrap() + ); + + // Random pk should return the default fee-recipient + let random_pk = Keypair::random().pk.compress(); + assert_eq!( + gf.load_fee_recipient(&random_pk).unwrap().unwrap(), + Address::from_str(DEFAULT_FEE_RECIPIENT).unwrap() + ); + } +} diff --git a/validator_client/src/http_api/create_validator.rs b/validator_client/src/http_api/create_validator.rs index 3c4901e614..a8e4fd2629 100644 --- a/validator_client/src/http_api/create_validator.rs +++ b/validator_client/src/http_api/create_validator.rs @@ -139,6 +139,7 @@ pub 
async fn create_validators_mnemonic, T: 'static + SlotClock, voting_password_string, request.enable, request.graffiti.clone(), + request.suggested_fee_recipient, ) .await .map_err(|e| { @@ -152,6 +153,7 @@ pub async fn create_validators_mnemonic, T: 'static + SlotClock, enabled: request.enable, description: request.description.clone(), graffiti: request.graffiti.clone(), + suggested_fee_recipient: request.suggested_fee_recipient, voting_pubkey, eth1_deposit_tx_data: eth2_serde_utils::hex::encode(ð1_deposit_data.rlp), deposit_gwei: request.deposit_gwei, @@ -170,6 +172,7 @@ pub async fn create_validators_web3signer( enabled: request.enable, voting_public_key: request.voting_public_key.clone(), graffiti: request.graffiti.clone(), + suggested_fee_recipient: request.suggested_fee_recipient, description: request.description.clone(), signing_definition: SigningDefinition::Web3Signer { url: request.url.clone(), diff --git a/validator_client/src/http_api/keystores.rs b/validator_client/src/http_api/keystores.rs index ce4035581c..ce6089c5b6 100644 --- a/validator_client/src/http_api/keystores.rs +++ b/validator_client/src/http_api/keystores.rs @@ -201,6 +201,7 @@ fn import_single_keystore( password, true, None, + None, )) .map_err(|e| format!("failed to initialize validator: {:?}", e))?; diff --git a/validator_client/src/http_api/mod.rs b/validator_client/src/http_api/mod.rs index 8a5b24f87b..590bfc208a 100644 --- a/validator_client/src/http_api/mod.rs +++ b/validator_client/src/http_api/mod.rs @@ -409,6 +409,7 @@ pub fn serve( drop(validator_dir); let voting_password = body.password.clone(); let graffiti = body.graffiti.clone(); + let suggested_fee_recipient = body.suggested_fee_recipient; let validator_def = { if let Some(runtime) = runtime.upgrade() { @@ -418,6 +419,7 @@ pub fn serve( voting_password, body.enable, graffiti, + suggested_fee_recipient, )) .map_err(|e| { warp_utils::reject::custom_server_error(format!( diff --git a/validator_client/src/http_api/tests.rs 
b/validator_client/src/http_api/tests.rs index fda622901b..788ca28ffb 100644 --- a/validator_client/src/http_api/tests.rs +++ b/validator_client/src/http_api/tests.rs @@ -267,6 +267,7 @@ impl ApiTester { enable: !s.disabled.contains(&i), description: format!("boi #{}", i), graffiti: None, + suggested_fee_recipient: None, deposit_gwei: E::default_spec().max_effective_balance, }) .collect::>(); @@ -397,6 +398,7 @@ impl ApiTester { .into(), keystore, graffiti: None, + suggested_fee_recipient: None, }; self.client @@ -414,6 +416,7 @@ impl ApiTester { .into(), keystore, graffiti: None, + suggested_fee_recipient: None, }; let response = self @@ -449,6 +452,7 @@ impl ApiTester { enable: s.enabled, description: format!("{}", i), graffiti: None, + suggested_fee_recipient: None, voting_public_key: kp.pk, url: format!("http://signer_{}.com/", i), root_certificate_path: None, @@ -574,6 +578,7 @@ fn routes_with_invalid_auth() { enable: <_>::default(), description: <_>::default(), graffiti: <_>::default(), + suggested_fee_recipient: <_>::default(), deposit_gwei: <_>::default(), }]) .await @@ -602,6 +607,7 @@ fn routes_with_invalid_auth() { enable: <_>::default(), keystore, graffiti: <_>::default(), + suggested_fee_recipient: <_>::default(), }) .await }) diff --git a/validator_client/src/http_api/tests/keystores.rs b/validator_client/src/http_api/tests/keystores.rs index 1b35a0b57b..c56f2f2298 100644 --- a/validator_client/src/http_api/tests/keystores.rs +++ b/validator_client/src/http_api/tests/keystores.rs @@ -37,6 +37,7 @@ fn web3signer_validator_with_pubkey(pubkey: PublicKey) -> Web3SignerValidatorReq enable: true, description: "".into(), graffiti: None, + suggested_fee_recipient: None, voting_public_key: pubkey, url: web3_signer_url(), root_certificate_path: None, diff --git a/validator_client/src/initialized_validators.rs b/validator_client/src/initialized_validators.rs index 5900c8e56b..a4dedf16b2 100644 --- a/validator_client/src/initialized_validators.rs +++ 
b/validator_client/src/initialized_validators.rs @@ -27,7 +27,7 @@ use std::io::{self, Read}; use std::path::{Path, PathBuf}; use std::sync::Arc; use std::time::Duration; -use types::{Graffiti, Keypair, PublicKey, PublicKeyBytes}; +use types::{Address, Graffiti, Keypair, PublicKey, PublicKeyBytes}; use url::{ParseError, Url}; use validator_dir::Builder as ValidatorDirBuilder; @@ -104,6 +104,7 @@ impl From for Error { pub struct InitializedValidator { signing_method: Arc, graffiti: Option, + suggested_fee_recipient: Option
, /// The validators index in `state.validators`, to be updated by an external service. index: Option, } @@ -269,6 +270,7 @@ impl InitializedValidator { Ok(Self { signing_method: Arc::new(signing_method), graffiti: def.graffiti.map(Into::into), + suggested_fee_recipient: def.suggested_fee_recipient, index: None, }) } @@ -538,6 +540,14 @@ impl InitializedValidators { self.validators.get(public_key).and_then(|v| v.graffiti) } + /// Returns the `suggested_fee_recipient` for a given public key specified in the + /// `ValidatorDefinitions`. + pub fn suggested_fee_recipient(&self, public_key: &PublicKeyBytes) -> Option
{ + self.validators + .get(public_key) + .and_then(|v| v.suggested_fee_recipient) + } + /// Sets the `InitializedValidator` and `ValidatorDefinition` `enabled` values. /// /// ## Notes diff --git a/validator_client/src/lib.rs b/validator_client/src/lib.rs index a721496fcd..c58ac25f1f 100644 --- a/validator_client/src/lib.rs +++ b/validator_client/src/lib.rs @@ -5,10 +5,12 @@ mod check_synced; mod cli; mod config; mod duties_service; +mod fee_recipient_file; mod graffiti_file; mod http_metrics; mod key_cache; mod notifier; +mod preparation_service; mod signing_method; mod sync_committee_service; @@ -38,6 +40,7 @@ use eth2::{reqwest::ClientBuilder, BeaconNodeHttpClient, StatusCode, Timeouts}; use http_api::ApiSecret; use notifier::spawn_notifier; use parking_lot::RwLock; +use preparation_service::{PreparationService, PreparationServiceBuilder}; use reqwest::Certificate; use slog::{error, info, warn, Logger}; use slot_clock::SlotClock; @@ -82,6 +85,7 @@ pub struct ProductionValidatorClient { attestation_service: AttestationService, sync_committee_service: SyncCommitteeService, doppelganger_service: Option>, + preparation_service: PreparationService, validator_store: Arc>, http_api_listen_addr: Option, config: Config, @@ -406,6 +410,15 @@ impl ProductionValidatorClient { .runtime_context(context.service_context("attestation".into())) .build()?; + let preparation_service = PreparationServiceBuilder::new() + .slot_clock(slot_clock.clone()) + .validator_store(validator_store.clone()) + .beacon_nodes(beacon_nodes.clone()) + .runtime_context(context.service_context("preparation".into())) + .fee_recipient(config.fee_recipient) + .fee_recipient_file(config.fee_recipient_file.clone()) + .build()?; + let sync_committee_service = SyncCommitteeService::new( duties_service.clone(), validator_store.clone(), @@ -427,6 +440,7 @@ impl ProductionValidatorClient { attestation_service, sync_committee_service, doppelganger_service, + preparation_service, validator_store, config, 
http_api_listen_addr: None, @@ -458,6 +472,11 @@ impl ProductionValidatorClient { .start_update_service(&self.context.eth2_config.spec) .map_err(|e| format!("Unable to start sync committee service: {}", e))?; + self.preparation_service + .clone() + .start_update_service(&self.context.eth2_config.spec) + .map_err(|e| format!("Unable to start preparation service: {}", e))?; + if let Some(doppelganger_service) = self.doppelganger_service.clone() { DoppelgangerService::start_update_service( doppelganger_service, diff --git a/validator_client/src/preparation_service.rs b/validator_client/src/preparation_service.rs new file mode 100644 index 0000000000..e532bd2461 --- /dev/null +++ b/validator_client/src/preparation_service.rs @@ -0,0 +1,278 @@ +use crate::beacon_node_fallback::{BeaconNodeFallback, RequireSynced}; +use crate::{ + fee_recipient_file::FeeRecipientFile, + validator_store::{DoppelgangerStatus, ValidatorStore}, +}; +use environment::RuntimeContext; +use slog::{debug, error, info}; +use slot_clock::SlotClock; +use std::ops::Deref; +use std::sync::Arc; +use tokio::time::{sleep, Duration}; +use types::{Address, ChainSpec, EthSpec, ProposerPreparationData}; + +/// Builds an `PreparationService`. +pub struct PreparationServiceBuilder { + validator_store: Option>>, + slot_clock: Option, + beacon_nodes: Option>>, + context: Option>, + fee_recipient: Option
, + fee_recipient_file: Option, +} + +impl PreparationServiceBuilder { + pub fn new() -> Self { + Self { + validator_store: None, + slot_clock: None, + beacon_nodes: None, + context: None, + fee_recipient: None, + fee_recipient_file: None, + } + } + + pub fn validator_store(mut self, store: Arc>) -> Self { + self.validator_store = Some(store); + self + } + + pub fn slot_clock(mut self, slot_clock: T) -> Self { + self.slot_clock = Some(slot_clock); + self + } + + pub fn beacon_nodes(mut self, beacon_nodes: Arc>) -> Self { + self.beacon_nodes = Some(beacon_nodes); + self + } + + pub fn runtime_context(mut self, context: RuntimeContext) -> Self { + self.context = Some(context); + self + } + + pub fn fee_recipient(mut self, fee_recipient: Option
) -> Self { + self.fee_recipient = fee_recipient; + self + } + + pub fn fee_recipient_file(mut self, fee_recipient_file: Option) -> Self { + self.fee_recipient_file = fee_recipient_file; + self + } + + pub fn build(self) -> Result, String> { + Ok(PreparationService { + inner: Arc::new(Inner { + validator_store: self + .validator_store + .ok_or("Cannot build PreparationService without validator_store")?, + slot_clock: self + .slot_clock + .ok_or("Cannot build PreparationService without slot_clock")?, + beacon_nodes: self + .beacon_nodes + .ok_or("Cannot build PreparationService without beacon_nodes")?, + context: self + .context + .ok_or("Cannot build PreparationService without runtime_context")?, + fee_recipient: self.fee_recipient, + fee_recipient_file: self.fee_recipient_file, + }), + }) + } +} + +/// Helper to minimise `Arc` usage. +pub struct Inner { + validator_store: Arc>, + slot_clock: T, + beacon_nodes: Arc>, + context: RuntimeContext, + fee_recipient: Option
, + fee_recipient_file: Option, +} + +/// Attempts to produce proposer preparations for all known validators at the beginning of each epoch. +pub struct PreparationService { + inner: Arc>, +} + +impl Clone for PreparationService { + fn clone(&self) -> Self { + Self { + inner: self.inner.clone(), + } + } +} + +impl Deref for PreparationService { + type Target = Inner; + + fn deref(&self) -> &Self::Target { + self.inner.deref() + } +} + +impl PreparationService { + /// Starts the service which periodically produces proposer preparations. + pub fn start_update_service(self, spec: &ChainSpec) -> Result<(), String> { + let log = self.context.log().clone(); + + let slot_duration = Duration::from_secs(spec.seconds_per_slot); + let duration_to_next_epoch = self + .slot_clock + .duration_to_next_epoch(E::slots_per_epoch()) + .ok_or("Unable to determine duration to next epoch")?; + + info!( + log, + "Proposer preparation service started"; + "next_update_millis" => duration_to_next_epoch.as_millis() + ); + + let executor = self.context.executor.clone(); + let spec = spec.clone(); + + let interval_fut = async move { + loop { + // Poll the endpoint immediately to ensure fee recipients are received. + self.prepare_proposers_and_publish(&spec) + .await + .map_err(|e| { + error!( + log, + "Error during proposer preparation"; + "error" => format!("{:?}", e), + ) + }) + .unwrap_or(()); + + if let Some(duration_to_next_slot) = self.slot_clock.duration_to_next_slot() { + sleep(duration_to_next_slot).await; + } else { + error!(log, "Failed to read slot clock"); + // If we can't read the slot clock, just wait another slot. 
+ sleep(slot_duration).await; + } + } + }; + + executor.spawn(interval_fut, "preparation_service"); + Ok(()) + } + + /// Prepare proposer preparations and send to beacon node + async fn prepare_proposers_and_publish(&self, spec: &ChainSpec) -> Result<(), String> { + let preparation_data = self.collect_preparation_data(spec); + if !preparation_data.is_empty() { + self.publish_preparation_data(preparation_data).await?; + } + + Ok(()) + } + + fn collect_preparation_data(&self, spec: &ChainSpec) -> Vec { + let log = self.context.log(); + + let fee_recipient_file = self + .fee_recipient_file + .clone() + .map(|mut fee_recipient_file| { + fee_recipient_file + .read_fee_recipient_file() + .map_err(|e| { + error!( + log, + "{}", format!("Error loading fee-recipient file: {:?}", e); + ); + }) + .unwrap_or(()); + fee_recipient_file + }); + + let all_pubkeys: Vec<_> = self + .validator_store + .voting_pubkeys(DoppelgangerStatus::ignored); + + all_pubkeys + .into_iter() + .filter_map(|pubkey| { + let validator_index = self.validator_store.validator_index(&pubkey); + if let Some(validator_index) = validator_index { + let fee_recipient = if let Some(from_validator_defs) = + self.validator_store.suggested_fee_recipient(&pubkey) + { + // If there is a `suggested_fee_recipient` in the validator definitions yaml + // file, use that value. + Some(from_validator_defs) + } else { + // If there's nothing in the validator defs file, check the fee recipient + // file. + fee_recipient_file + .as_ref() + .and_then(|f| match f.get_fee_recipient(&pubkey) { + Ok(f) => f, + Err(_e) => None, + }) + // If there's nothing in the file, try the process-level default value. 
+ .or(self.fee_recipient) + }; + + if let Some(fee_recipient) = fee_recipient { + Some(ProposerPreparationData { + validator_index, + fee_recipient, + }) + } else { + if spec.bellatrix_fork_epoch.is_some() { + error!( + log, + "Validator is missing fee recipient"; + "msg" => "update validator_definitions.yml", + "pubkey" => ?pubkey + ); + } + None + } + } else { + None + } + }) + .collect() + } + + async fn publish_preparation_data( + &self, + preparation_data: Vec, + ) -> Result<(), String> { + let log = self.context.log(); + + // Post the proposer preparations to the BN. + let preparation_data_len = preparation_data.len(); + let preparation_entries = preparation_data.as_slice(); + match self + .beacon_nodes + .first_success(RequireSynced::Yes, |beacon_node| async move { + beacon_node + .post_validator_prepare_beacon_proposer(preparation_entries) + .await + }) + .await + { + Ok(()) => debug!( + log, + "Published proposer preparation"; + "count" => preparation_data_len, + ), + Err(e) => error!( + log, + "Unable to publish proposer preparation"; + "error" => %e, + ), + } + Ok(()) + } +} diff --git a/validator_client/src/validator_store.rs b/validator_client/src/validator_store.rs index 884b97694e..3f4a01faaa 100644 --- a/validator_client/src/validator_store.rs +++ b/validator_client/src/validator_store.rs @@ -17,7 +17,7 @@ use std::path::Path; use std::sync::Arc; use task_executor::TaskExecutor; use types::{ - attestation::Error as AttestationError, graffiti::GraffitiString, AggregateAndProof, + attestation::Error as AttestationError, graffiti::GraffitiString, Address, AggregateAndProof, Attestation, BeaconBlock, ChainSpec, ContributionAndProof, Domain, Epoch, EthSpec, Fork, Graffiti, Hash256, Keypair, PublicKeyBytes, SelectionProof, Signature, SignedAggregateAndProof, SignedBeaconBlock, SignedContributionAndProof, Slot, SyncAggregatorSelectionData, @@ -148,11 +148,13 @@ impl ValidatorStore { password: ZeroizeString, enable: bool, graffiti: Option, + 
suggested_fee_recipient: Option
, ) -> Result { let mut validator_def = ValidatorDefinition::new_keystore_with_password( voting_keystore_path, Some(password), graffiti.map(Into::into), + suggested_fee_recipient, ) .map_err(|e| format!("failed to create validator definitions: {:?}", e))?; @@ -351,6 +353,12 @@ impl ValidatorStore { self.validators.read().graffiti(validator_pubkey) } + pub fn suggested_fee_recipient(&self, validator_pubkey: &PublicKeyBytes) -> Option
{ + self.validators + .read() + .suggested_fee_recipient(validator_pubkey) + } + pub async fn sign_block( &self, validator_pubkey: PublicKeyBytes,