Fix lots of typos.

This commit is contained in:
John Adler
2019-07-26 15:26:06 -04:00
parent b5af73d056
commit fec7168512
41 changed files with 68 additions and 68 deletions

View File

@@ -23,7 +23,7 @@ impl AggregateSignature {
/// Instantiate a new AggregateSignature.
///
/// is_empty is false
/// AggregateSiganture is point at infinity
/// AggregateSignature is point at infinity
pub fn new() -> Self {
Self {
aggregate_signature: RawAggregateSignature::new(),
@@ -87,7 +87,7 @@ impl AggregateSignature {
.verify_multiple(&msg[..], domain, &aggregate_public_keys[..])
}
/// Return AggregateSiganture as bytes
/// Return AggregateSignature as bytes
pub fn as_bytes(&self) -> Vec<u8> {
if self.is_empty {
return vec![0; BLS_AGG_SIG_BYTE_SIZE];
@@ -95,7 +95,7 @@ impl AggregateSignature {
self.aggregate_signature.as_bytes()
}
/// Convert bytes to AggregateSiganture
/// Convert bytes to AggregateSignature
pub fn from_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
for byte in bytes {
if *byte != 0 {
@@ -114,7 +114,7 @@ impl AggregateSignature {
Ok(Self::empty_signature())
}
/// Returns if the AggregateSiganture `is_empty`
/// Returns if the AggregateSignature `is_empty`
pub fn is_empty(&self) -> bool {
self.is_empty
}

View File

@@ -64,7 +64,7 @@ pub struct BTreeOverlay {
}
impl BTreeOverlay {
/// Instantiates a new instance for `item`, where it's first chunk is `inital_offset` and has
/// Instantiates a new instance for `item`, where its first chunk is `initial_offset` and has
/// the specified `depth`.
pub fn new<T>(item: &T, initial_offset: usize, depth: usize) -> Self
where

View File

@@ -201,7 +201,7 @@ pub fn update_tree_hash_cache<T: CachedTreeHash>(
cache.chunk_index = new.end;
}
// The list has been lengthened and this is a new item that was prevously a
// The list has been lengthened and this is a new item that was previously a
// padding item.
//
// Splice the tree for the new item over the padding chunk.
@@ -268,7 +268,7 @@ pub fn update_tree_hash_cache<T: CachedTreeHash>(
// This leaf was padding in both lists, there's nothing to do.
(LeafNode::Padding, LeafNode::Padding) => (),
// As we are looping through the larger of the lists of leaf nodes, it should
// be impossible for either leaf to be non-existant.
// be impossible for either leaf to be non-existent.
(LeafNode::DoesNotExist, LeafNode::DoesNotExist) => unreachable!(),
}
}

View File

@@ -139,7 +139,7 @@ impl TreeHashCache {
}
/// Instantiate a new cache from the pre-built `bytes` where each `self.chunk_modified` will be
/// set to `intitial_modified_state`.
/// set to `initial_modified_state`.
///
/// Note: `bytes.len()` must be a multiple of 32
pub fn from_bytes(

View File

@@ -29,7 +29,7 @@ impl<'a, T> Iterator for Split<'a, T> {
}
}
/// Splits a slice into chunks of size n. All postive n values are applicable,
/// Splits a slice into chunks of size n. All positive n values are applicable,
/// hence the honey_badger prefix.
///
/// Returns an iterator over the original list.

View File

@@ -213,7 +213,7 @@ return rawbytes[current_index+4:current_index+4+bytes_length], new_index
#### List
Deserailize each object in the list.
Deserialize each object in the list.
1. Get the length of the serialized list.
2. Loop through deserializing each item in the list until you reach the
entire length of the list.
@@ -437,7 +437,7 @@ let decoded: Result<(Vec<usize>, usize), DecodeError> = decode_ssz_list( &encode
Deserializes the "length" value in the serialized bytes from the index. The
length of bytes is given (usually 4 stated in the reference implementation) and
is often the value appended to the list infront of the actual serialized
is often the value appended to the list in front of the actual serialized
object.
| Parameter | Description |

View File

@@ -535,7 +535,7 @@ mod tests {
}
#[test]
fn awkward_fixed_lenth_portion() {
fn awkward_fixed_length_portion() {
assert_eq!(
<Vec<Vec<u16>>>::from_ssz_bytes(&[10, 0, 0, 0, 10, 0, 0, 0, 0, 0]),
Err(DecodeError::InvalidByteLength {

View File

@@ -47,7 +47,7 @@ pub fn merkleize_standard(bytes: &[u8]) -> Vec<u8> {
j -= HASHSIZE;
let hash = match o.get(i..i + MERKLE_HASH_CHUNK) {
// All bytes are available, hash as ususal.
// All bytes are available, hash as usual.
Some(slice) => hash(slice),
// Unable to get all the bytes.
None => {

View File

@@ -27,7 +27,7 @@ fn get_hashable_named_field_idents<'a>(struct_data: &'a syn::DataStruct) -> Vec<
.collect()
}
/// Returns true if some field has an attribute declaring it should not be hashedd.
/// Returns true if some field has an attribute declaring it should not be hashed.
///
/// The field attribute is: `#[tree_hash(skip_hashing)]`
fn should_skip_hashing(field: &syn::Field) -> bool {