Update to tokio 1.1 (#2172)

## Issue Addressed

resolves #2129
resolves #2099 
addresses some of #1712
unblocks #2076
unblocks #2153 

## Proposed Changes

- Updates all the dependencies mentioned in #2129, except for web3. They haven't merged their tokio 1.0 update because they are waiting on some dependencies of their own. Since we only use web3 in tests, I think updating it in a separate issue is fine. If they are able to merge soon though, I can update it in this PR.

- Updates `tokio_util` to 0.6.2 and `bytes` to 1.0.1.

- We haven't made a discv5 release since merging the tokio 1.0 updates, so I'm pinning to a commit rather than a release for now (a sketch of the resulting dependency pins is below). **Edit:** I think we should merge an update of `tokio_util` to 0.6.2 into discv5 before this release because it has panic fixes in `DelayQueue` --> PR in discv5: https://github.com/sigp/discv5/pull/58
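
For reference, a minimal sketch of the dependency pins described above; the discv5 `rev` is a placeholder for the pinned commit (not reproduced here), and feature flags are omitted:

```toml
# Sketch only: versions from this PR; the discv5 rev is a placeholder.
[dependencies]
tokio = "1.1"
tokio-util = "0.6.2"
bytes = "1.0.1"
discv5 = { git = "https://github.com/sigp/discv5", rev = "<pinned-commit>" }
```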

## Additional Info

tokio 1.0 API changes that required corresponding changes in lighthouse:

- `interval.next().await.is_some()` -> `interval.tick().await` (see the first sketch after this list)
- the `sleep` future is now `!Unpin` -> https://github.com/tokio-rs/tokio/issues/3028 (pinning sketch below)
- `try_recv` has been temporarily removed from `mpsc` -> https://github.com/tokio-rs/tokio/issues/3350 (workaround sketch below)
- stream features have moved to `tokio-stream` and `broadcast::Receiver::into_stream()` has been temporarily removed -> https://github.com/tokio-rs/tokio/issues/2870
- I've copied over the `BroadcastStream` wrapper from https://github.com/tokio-rs/tokio/pull/3384, but can update to use `tokio-stream` once that PR is merged (usage sketch below)
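
A minimal sketch of the `interval` change (illustrative code, not lighthouse's):

```rust
use std::time::Duration;

#[tokio::main]
async fn main() {
    let mut interval = tokio::time::interval(Duration::from_secs(1));
    for _ in 0..3 {
        // tokio 0.2: `interval.next().await.is_some()` (Interval was a Stream).
        // tokio 1.x: `tick()` resolves to the `Instant` at which it fired.
        let _instant = interval.tick().await;
        // ... periodic work here ...
    }
}
```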
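
A sketch of handling the now-`!Unpin` `sleep` future: pin it with `tokio::pin!` so it can be polled by reference, e.g. inside a `select!` loop:

```rust
use std::time::Duration;

#[tokio::main]
async fn main() {
    // `Sleep` is `!Unpin` in tokio 1.x, so it must be pinned before it can
    // be polled by reference across loop iterations.
    let sleep = tokio::time::sleep(Duration::from_secs(1));
    tokio::pin!(sleep);

    tokio::select! {
        _ = &mut sleep => println!("timer fired"),
    }
}
```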
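
One possible stand-in for the removed `mpsc::Receiver::try_recv` is to poll `recv()` exactly once with `now_or_never()` from the `futures` crate (a workaround sketch, not necessarily what lighthouse does):

```rust
use futures::future::FutureExt; // for `now_or_never`

#[tokio::main]
async fn main() {
    let (tx, mut rx) = tokio::sync::mpsc::channel::<u32>(8);
    tx.send(1).await.unwrap();

    // `now_or_never()` polls the future once; `None` means "nothing ready yet".
    match rx.recv().now_or_never() {
        Some(Some(v)) => println!("got {}", v),
        Some(None) => println!("channel closed"),
        None => println!("no message ready"),
    }
}
```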
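
Once that PR lands in `tokio-stream`, the copied wrapper should reduce to something like this (a sketch against the proposed `BroadcastStream` API):

```rust
use tokio_stream::wrappers::BroadcastStream;
use tokio_stream::StreamExt;

#[tokio::main]
async fn main() {
    let (tx, rx) = tokio::sync::broadcast::channel::<u32>(16);

    // `broadcast::Receiver::into_stream()` is gone; wrap the receiver instead.
    // Items are `Result<T, _>` because a slow consumer can lag the channel.
    let mut stream = BroadcastStream::new(rx);

    tx.send(42).unwrap();
    drop(tx); // close the channel so the stream terminates

    while let Some(item) = stream.next().await {
        match item {
            Ok(v) => println!("got {}", v),
            Err(e) => println!("lagged: {}", e),
        }
    }
}
```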

Co-authored-by: realbigsean <seananderson33@gmail.com>
Author: realbigsean
Date: 2021-02-10 23:29:49 +00:00
Parent: 6f4da9a5d2
Commit: e20f64b21a
74 changed files with 1146 additions and 1327 deletions


@@ -7,7 +7,6 @@ use lighthouse_version::VERSION;
 use slog::{crit, info, warn};
 use std::path::PathBuf;
 use std::process::exit;
-use tokio_compat_02::FutureExt;
 use types::{EthSpec, EthSpecId};
 use validator_client::ProductionValidatorClient;
@@ -281,19 +280,16 @@ fn run<E: EthSpec>(
                 &context.eth2_config().spec,
                 context.log().clone(),
             )?;
-            environment.runtime().spawn(
-                async move {
-                    if let Err(e) = ProductionBeaconNode::new(context.clone(), config).await {
-                        crit!(log, "Failed to start beacon node"; "reason" => e);
-                        // Ignore the error since it always occurs during normal operation when
-                        // shutting down.
-                        let _ = executor
-                            .shutdown_sender()
-                            .try_send("Failed to start beacon node");
-                    }
-                }
-                .compat(),
-            );
+            environment.runtime().spawn(async move {
+                if let Err(e) = ProductionBeaconNode::new(context.clone(), config).await {
+                    crit!(log, "Failed to start beacon node"; "reason" => e);
+                    // Ignore the error since it always occurs during normal operation when
+                    // shutting down.
+                    let _ = executor
+                        .shutdown_sender()
+                        .try_send("Failed to start beacon node");
+                }
+            });
         }
         ("validator_client", Some(matches)) => {
             let context = environment.core_context();
@@ -301,26 +297,23 @@ fn run<E: EthSpec>(
             let executor = context.executor.clone();
             let config = validator_client::Config::from_cli(&matches, context.log())
                 .map_err(|e| format!("Unable to initialize validator config: {}", e))?;
-            environment.runtime().spawn(
-                async move {
-                    let run = async {
-                        ProductionValidatorClient::new(context, config)
-                            .await?
-                            .start_service()?;
-                        Ok::<(), String>(())
-                    };
-                    if let Err(e) = run.await {
-                        crit!(log, "Failed to start validator client"; "reason" => e);
-                        // Ignore the error since it always occurs during normal operation when
-                        // shutting down.
-                        let _ = executor
-                            .shutdown_sender()
-                            .try_send("Failed to start validator client");
-                    }
-                }
-                .compat(),
-            );
+            environment.runtime().spawn(async move {
+                let run = async {
+                    ProductionValidatorClient::new(context, config)
+                        .await?
+                        .start_service()?;
+                    Ok::<(), String>(())
+                };
+                if let Err(e) = run.await {
+                    crit!(log, "Failed to start validator client"; "reason" => e);
+                    // Ignore the error since it always occurs during normal operation when
+                    // shutting down.
+                    let _ = executor
+                        .shutdown_sender()
+                        .try_send("Failed to start validator client");
+                }
+            });
         }
         ("remote_signer", Some(matches)) => {
             if let Err(e) = remote_signer::run(&mut environment, matches) {