Files
lighthouse/lcli/src/http_sync.rs
Mac L f5809aff87 Bump ssz_types to v0.12.2 (#8032)
https://github.com/sigp/lighthouse/issues/8012


  Replace all instances of `VariableList::from` and `FixedVector::from` with their `try_from` variants.

While I tried to use proper error handling in most cases, in certain situations where `try_from` can trivially never fail, adding an `expect` avoided a lot of extra complexity.


Co-Authored-By: Mac L <mjladson@pm.me>

Co-Authored-By: Michael Sproul <michaelsproul@users.noreply.github.com>

Co-Authored-By: Michael Sproul <michael@sigmaprime.io>
2025-10-28 04:01:09 +00:00

154 lines
5.1 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
use clap::ArgMatches;
use clap_utils::{parse_optional, parse_required};
use environment::Environment;
use eth2::{
BeaconNodeHttpClient, Error, SensitiveUrl, Timeouts,
types::{BlockId, ChainSpec, ForkName, PublishBlockRequest, SignedBlockContents},
};
use eth2_network_config::Eth2NetworkConfig;
use ssz::Encode;
use std::fs;
use std::fs::File;
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;
use types::EthSpec;
/// Timeout applied to every HTTP request made to the source and target nodes.
/// Deliberately generous (1 hour) since bulk SSZ block downloads can be slow.
const HTTP_TIMEOUT: Duration = Duration::from_secs(3600);
/// Default on-disk block cache directory, relative to the current working dir.
const DEFAULT_CACHE_DIR: &str = "./cache";
/// Entry point for the HTTP sync subcommand.
///
/// Obtains a runtime handle from the environment's task executor and drives
/// [`run_async`] to completion on it. Fails with an error string if the
/// runtime is already shutting down.
pub fn run<T: EthSpec>(
    env: Environment<T>,
    network_config: Eth2NetworkConfig,
    matches: &ArgMatches,
) -> Result<(), String> {
    let executor = env.core_context().executor;
    let handle = executor.handle().ok_or("shutdown in progress")?;
    handle.block_on(run_async::<T>(network_config, matches))
}
/// Sync blocks from a source beacon node to a target beacon node.
///
/// Walks backwards from `start-block` via parent roots, collecting blocks
/// until it reaches either a user-supplied known common ancestor or a block
/// the target already has, then re-publishes the collected blocks to the
/// target in ascending slot order. Downloaded blocks are cached on disk so
/// interrupted runs can resume cheaply.
///
/// Duplicate-block responses from the target (HTTP 202) are tolerated so a
/// partially-applied sync can be re-run.
pub async fn run_async<T: EthSpec>(
    network_config: Eth2NetworkConfig,
    matches: &ArgMatches,
) -> Result<(), String> {
    let spec = &network_config.chain_spec::<T>()?;
    let source_url: SensitiveUrl = parse_required(matches, "source-url")?;
    let target_url: SensitiveUrl = parse_required(matches, "target-url")?;
    let start_block: BlockId = parse_required(matches, "start-block")?;
    // NOTE(review): the flag name "knowncommon-ancestor" looks like a mangled
    // "known-common-ancestor" — confirm against the CLI flag definition before
    // changing, since the string must match the clap arg registration.
    let maybe_common_ancestor_block: Option<BlockId> =
        parse_optional(matches, "knowncommon-ancestor")?;
    let cache_dir_path: PathBuf =
        parse_optional(matches, "block-cache-dir")?.unwrap_or(DEFAULT_CACHE_DIR.into());

    let source = BeaconNodeHttpClient::new(source_url, Timeouts::set_all(HTTP_TIMEOUT));
    let target = BeaconNodeHttpClient::new(target_url, Timeouts::set_all(HTTP_TIMEOUT));

    if !cache_dir_path.exists() {
        fs::create_dir_all(&cache_dir_path)
            .map_err(|e| format!("Unable to create block cache dir: {:?}", e))?;
    }

    // 1. Download blocks back from head, looking for common ancestor.
    let mut blocks = vec![];
    let mut next_block_id = start_block;
    loop {
        println!("downloading {next_block_id:?}");

        let publish_block_req =
            get_block_from_source::<T>(&source, next_block_id, spec, &cache_dir_path).await;
        let block = publish_block_req.signed_block();

        next_block_id = BlockId::Root(block.parent_root());
        blocks.push((block.slot(), publish_block_req));

        if let Some(ref common_ancestor_block) = maybe_common_ancestor_block
            && common_ancestor_block == &next_block_id
        {
            println!("reached known common ancestor: {next_block_id:?}");
            break;
        }

        // Propagate HTTP errors to the caller rather than panicking: this
        // function already returns `Result`, so an `unwrap` here would turn a
        // transient network failure into a crash.
        let block_exists_in_target = target
            .get_beacon_blocks_ssz::<T>(next_block_id, spec)
            .await
            .map_err(|e| format!("error checking target for {next_block_id:?}: {e:?}"))?
            .is_some();
        if block_exists_in_target {
            println!("common ancestor found: {next_block_id:?}");
            break;
        }
    }

    // 2. Apply blocks to target, oldest first (blocks were collected newest
    // first, hence the `rev`).
    for (slot, block) in blocks.iter().rev() {
        println!("posting block at slot {slot}");
        if let Err(e) = target.post_beacon_blocks(block).await {
            // A 202 from the target means it already knew the block; skip it.
            if let Error::ServerMessage(ref e) = e
                && e.code == 202
            {
                println!("duplicate block detected while posting block at slot {slot}");
                continue;
            }
            return Err(format!("error posting {slot}: {e:?}"));
        } else {
            println!("success");
        }
    }

    println!("SYNCED!!!!");
    Ok(())
}
/// Fetch a block (and its blob sidecars) for `block_id`, preferring the
/// on-disk cache and falling back to the source beacon node. Newly downloaded
/// blocks are written to the cache keyed by canonical block root.
///
/// # Panics
///
/// Panics (via `expect`, with a diagnostic message) on cache I/O failure, SSZ
/// decode failure, or HTTP failure — a failed download is unrecoverable
/// mid-sync for this CLI tool.
async fn get_block_from_source<T: EthSpec>(
    source: &BeaconNodeHttpClient,
    block_id: BlockId,
    spec: &ChainSpec,
    cache_dir_path: &Path,
) -> PublishBlockRequest<T> {
    let mut cache_path = cache_dir_path.join(format!("block_{block_id}"));

    if cache_path.exists() {
        // Cache hit: decode the previously stored publish request.
        let mut f = File::open(&cache_path).expect("unable to open cached block file");
        let mut bytes = vec![];
        f.read_to_end(&mut bytes)
            .expect("unable to read cached block file");
        // NOTE(review): the fork is hard-coded to Deneb, so cached blocks from
        // other forks will fail to decode — confirm this matches intended use.
        PublishBlockRequest::from_ssz_bytes(&bytes, ForkName::Deneb)
            .expect("unable to SSZ-decode cached block")
    } else {
        let block_from_source = source
            .get_beacon_blocks_ssz::<T>(block_id, spec)
            .await
            .expect("HTTP error fetching block from source")
            .expect("block not found on source");
        let blobs_from_source = source
            .get_blob_sidecars::<T>(block_id, None, spec)
            .await
            .expect("HTTP error fetching blob sidecars from source")
            .expect("blob sidecars not found on source")
            .into_data();

        // Split each sidecar into (proof, blob). `kzg_proof` is copied and
        // only the blob is cloned — the previous `.cloned()` duplicated each
        // entire sidecar before cloning the blob a second time.
        let (kzg_proofs, blobs): (Vec<_>, Vec<_>) = blobs_from_source
            .iter()
            .map(|sidecar| (sidecar.kzg_proof, sidecar.blob.clone()))
            .unzip();

        let block_root = block_from_source.canonical_root();
        let block_contents = SignedBlockContents {
            signed_block: Arc::new(block_from_source),
            kzg_proofs: kzg_proofs
                .try_into()
                .expect("proof count within list bound"),
            blobs: blobs.try_into().expect("blob count within list bound"),
        };
        let publish_block_req = PublishBlockRequest::BlockContents(block_contents);

        // Cache under the canonical root so later runs — which request parent
        // blocks by root — hit the cache.
        cache_path = cache_dir_path.join(format!("block_{block_root:?}"));
        let mut f = File::create(&cache_path).expect("unable to create cache file");
        f.write_all(&publish_block_req.as_ssz_bytes())
            .expect("unable to write cache file");

        publish_block_req
    }
}