Update to Rust 1.59 and 2021 edition (#3038)

## Proposed Changes

Lots of lint updates related to `flat_map`, `unwrap_or_else` and string patterns. I did a little more creative refactoring in the op pool, but otherwise followed Clippy's suggestions.

## Additional Info

We need this PR to unblock CI.
This commit is contained in:
Michael Sproul
2022-02-25 00:10:17 +00:00
parent c1df5d29cb
commit 5e1f8a8480
115 changed files with 173 additions and 188 deletions

View File

@@ -2,7 +2,7 @@
name = "cached_tree_hash"
version = "0.1.0"
authors = ["Michael Sproul <michael@sigmaprime.io>"]
edition = "2018"
edition = "2021"
[dependencies]
ethereum-types = "0.12.1"

View File

@@ -127,7 +127,7 @@ impl<T: Encode + Decode> CacheArena<T> {
.offsets
.get(alloc_id + 1)
.copied()
.unwrap_or_else(|| self.backing.len());
.unwrap_or(self.backing.len());
Ok(end - start)
}
@@ -168,7 +168,7 @@ impl<T: Encode + Decode> CacheArena<T> {
.offsets
.get(alloc_id + 1)
.copied()
.unwrap_or_else(|| self.backing.len());
.unwrap_or(self.backing.len());
Ok(start..end)
}

View File

@@ -2,7 +2,7 @@
name = "fork_choice"
version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -219,7 +219,7 @@ fn dequeue_attestations(
queued_attestations
.iter()
.position(|a| a.slot >= current_slot)
.unwrap_or_else(|| queued_attestations.len()),
.unwrap_or(queued_attestations.len()),
);
std::mem::replace(queued_attestations, remaining)

View File

@@ -613,7 +613,7 @@ fn justified_balances() {
}
macro_rules! assert_invalid_block {
($err: tt, $($error: pat) |+ $( if $guard: expr )?) => {
($err: tt, $($error: pat_param) |+ $( if $guard: expr )?) => {
assert!(
matches!(
$err,
@@ -719,7 +719,7 @@ fn invalid_block_finalized_descendant() {
}
macro_rules! assert_invalid_attestation {
($err: tt, $($error: pat) |+ $( if $guard: expr )?) => {
($err: tt, $($error: pat_param) |+ $( if $guard: expr )?) => {
assert!(
matches!(
$err,

View File

@@ -2,7 +2,7 @@
name = "int_to_bytes"
version = "0.2.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
edition = "2021"
[dependencies]
bytes = "1.0.1"

View File

@@ -2,7 +2,7 @@
name = "merkle_proof"
version = "0.2.0"
authors = ["Michael Sproul <michael@sigmaprime.io>"]
edition = "2018"
edition = "2021"
[dependencies]
ethereum-types = "0.12.1"

View File

@@ -2,7 +2,7 @@
name = "proto_array"
version = "0.2.0"
authors = ["Paul Hauner <paul@sigmaprime.io>"]
edition = "2018"
edition = "2021"
[[bin]]
name = "proto_array"

View File

@@ -313,7 +313,7 @@ impl ProtoArray {
.indices
.get(justified_root)
.copied()
.ok_or_else(|| Error::JustifiedNodeUnknown(*justified_root))?;
.ok_or(Error::JustifiedNodeUnknown(*justified_root))?;
let justified_node = self
.nodes

View File

@@ -2,7 +2,7 @@
name = "safe_arith"
version = "0.1.0"
authors = ["Michael Sproul <michael@sigmaprime.io>"]
edition = "2018"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -2,7 +2,7 @@
name = "eth2_serde_utils"
version = "0.1.1"
authors = ["Paul Hauner <paul@paulhauner.com>", "Michael Sproul <michael@sigmaprime.io>"]
edition = "2018"
edition = "2021"
description = "Serialization and deserialization utilities useful for JSON representations of Ethereum 2.0 types."
license = "Apache-2.0"

View File

@@ -51,7 +51,7 @@ where
let raw = hex::encode(num.to_be_bytes());
let trimmed = raw.trim_start_matches('0');
let hex = if trimmed.is_empty() { "0" } else { &trimmed };
let hex = if trimmed.is_empty() { "0" } else { trimmed };
serializer.serialize_str(&format!("0x{}", &hex))
}

View File

@@ -2,7 +2,7 @@
name = "eth2_ssz"
version = "0.4.1"
authors = ["Paul Hauner <paul@sigmaprime.io>"]
edition = "2018"
edition = "2021"
description = "SimpleSerialize (SSZ) as used in Ethereum 2.0"
license = "Apache-2.0"

View File

@@ -187,12 +187,13 @@ impl<'a> SszDecoderBuilder<'a> {
let start = self.items_index;
self.items_index += ssz_fixed_len;
let slice = self.bytes.get(start..self.items_index).ok_or_else(|| {
DecodeError::InvalidByteLength {
len: self.bytes.len(),
expected: self.items_index,
}
})?;
let slice =
self.bytes
.get(start..self.items_index)
.ok_or(DecodeError::InvalidByteLength {
len: self.bytes.len(),
expected: self.items_index,
})?;
self.items.push(slice);
} else {
@@ -347,12 +348,12 @@ pub fn split_union_bytes(bytes: &[u8]) -> Result<(UnionSelector, &[u8]), DecodeE
/// Reads a `BYTES_PER_LENGTH_OFFSET`-byte length from `bytes`, where `bytes.len() >=
/// BYTES_PER_LENGTH_OFFSET`.
pub fn read_offset(bytes: &[u8]) -> Result<usize, DecodeError> {
decode_offset(bytes.get(0..BYTES_PER_LENGTH_OFFSET).ok_or_else(|| {
decode_offset(bytes.get(0..BYTES_PER_LENGTH_OFFSET).ok_or(
DecodeError::InvalidLengthPrefix {
len: bytes.len(),
expected: BYTES_PER_LENGTH_OFFSET,
}
})?)
},
)?)
}
/// Decode bytes as a little-endian usize, returning an `Err` if `bytes.len() !=

View File

@@ -2,7 +2,7 @@
name = "eth2_ssz_derive"
version = "0.3.0"
authors = ["Paul Hauner <paul@sigmaprime.io>"]
edition = "2018"
edition = "2021"
description = "Procedural derive macros to accompany the eth2_ssz crate."
license = "Apache-2.0"

View File

@@ -2,7 +2,7 @@
name = "eth2_ssz_types"
version = "0.2.2"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
edition = "2021"
description = "Provides types with unique properties required for SSZ serialization and Merklization."
license = "Apache-2.0"

View File

@@ -2,7 +2,7 @@
name = "state_processing"
version = "0.2.0"
authors = ["Paul Hauner <paul@paulhauner.com>", "Michael Sproul <michael@sigmaprime.io>"]
edition = "2018"
edition = "2021"
[dev-dependencies]
env_logger = "0.9.0"

View File

@@ -3,7 +3,7 @@
not(test),
deny(
clippy::integer_arithmetic,
clippy::disallowed_method,
clippy::disallowed_methods,
clippy::indexing_slicing,
clippy::unwrap_used,
clippy::expect_used,

View File

@@ -177,7 +177,7 @@ where
Ok(SignatureSet::single_pubkey(
block.body().randao_reveal(),
get_pubkey(proposer_index).ok_or_else(|| Error::ValidatorUnknown(proposer_index as u64))?,
get_pubkey(proposer_index).ok_or(Error::ValidatorUnknown(proposer_index as u64))?,
message,
))
}
@@ -199,15 +199,13 @@ where
block_header_signature_set(
state,
&proposer_slashing.signed_header_1,
get_pubkey(proposer_index)
.ok_or_else(|| Error::ValidatorUnknown(proposer_index as u64))?,
get_pubkey(proposer_index).ok_or(Error::ValidatorUnknown(proposer_index as u64))?,
spec,
),
block_header_signature_set(
state,
&proposer_slashing.signed_header_2,
get_pubkey(proposer_index)
.ok_or_else(|| Error::ValidatorUnknown(proposer_index as u64))?,
get_pubkey(proposer_index).ok_or(Error::ValidatorUnknown(proposer_index as u64))?,
spec,
),
))
@@ -363,7 +361,7 @@ where
Ok(SignatureSet::single_pubkey(
&signed_exit.signature,
get_pubkey(proposer_index).ok_or_else(|| Error::ValidatorUnknown(proposer_index as u64))?,
get_pubkey(proposer_index).ok_or(Error::ValidatorUnknown(proposer_index as u64))?,
message,
))
}
@@ -521,7 +519,7 @@ where
{
let mut pubkeys = Vec::with_capacity(T::SyncSubcommitteeSize::to_usize());
for pubkey in pubkey_bytes {
pubkeys.push(get_pubkey(pubkey).ok_or_else(|| Error::ValidatorPubkeyUnknown(*pubkey))?);
pubkeys.push(get_pubkey(pubkey).ok_or(Error::ValidatorPubkeyUnknown(*pubkey))?);
}
let domain = spec.get_domain(epoch, Domain::SyncCommittee, fork, genesis_validators_root);

View File

@@ -4,7 +4,6 @@ use crate::per_epoch_processing::{
Delta, Error,
};
use safe_arith::SafeArith;
use std::array::IntoIter as ArrayIter;
use types::{BeaconState, ChainSpec, EthSpec};
/// Combination of several deltas for different components of an attestation reward.
@@ -30,13 +29,13 @@ impl AttestationDelta {
inactivity_penalty_delta,
} = self;
let mut result = Delta::default();
for delta in ArrayIter::new([
for delta in [
source_delta,
target_delta,
head_delta,
inclusion_delay_delta,
inactivity_penalty_delta,
]) {
] {
result.combine(delta)?;
}
Ok(result)

View File

@@ -2,7 +2,7 @@
name = "swap_or_not_shuffle"
version = "0.2.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
edition = "2021"
[[bench]]
name = "benches"

View File

@@ -2,7 +2,7 @@
name = "tree_hash"
version = "0.4.1"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
edition = "2021"
license = "Apache-2.0"
description = "Efficient Merkle-hashing as used in Ethereum 2.0"

View File

@@ -376,8 +376,8 @@ mod test {
fn compare_with_reference(leaves: &[Hash256], depth: usize) {
let reference_bytes = leaves
.iter()
.map(|hash| hash.as_bytes().to_vec())
.flatten()
.flat_map(|hash| hash.as_bytes())
.copied()
.collect::<Vec<_>>();
let reference_root = merkleize_padded(&reference_bytes, 1 << (depth - 1));

View File

@@ -2,7 +2,7 @@
name = "tree_hash_derive"
version = "0.4.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
edition = "2021"
description = "Procedural derive macros to accompany the tree_hash crate."
license = "Apache-2.0"

View File

@@ -109,7 +109,7 @@ fn cached_tree_hash_attr_metas(attrs: &[Attribute]) -> Vec<Meta> {
fn should_skip_hashing(field: &syn::Field) -> bool {
field.attrs.iter().any(|attr| {
attr.path.is_ident("tree_hash")
&& attr.tokens.to_string().replace(" ", "") == "(skip_hashing)"
&& attr.tokens.to_string().replace(' ', "") == "(skip_hashing)"
})
}

View File

@@ -2,7 +2,7 @@
name = "types"
version = "0.2.0"
authors = ["Paul Hauner <paul@paulhauner.com>", "Age Manning <Age@AgeManning.com>"]
edition = "2018"
edition = "2021"
[[bench]]
name = "benches"

View File

@@ -1,5 +1,5 @@
#![allow(clippy::integer_arithmetic)]
#![allow(clippy::disallowed_method)]
#![allow(clippy::disallowed_methods)]
#![allow(clippy::indexing_slicing)]
use super::Error;

View File

@@ -7,7 +7,7 @@
not(test),
deny(
clippy::integer_arithmetic,
clippy::disallowed_method,
clippy::disallowed_methods,
clippy::indexing_slicing
)
)]