This repository has been archived by the owner on Oct 23, 2024. It is now read-only.

Update to 1.7.0 #92

Merged
merged 20 commits into from Mar 6, 2024
96 changes: 68 additions & 28 deletions .github/workflows/validate_pr.yml
@@ -2,40 +2,80 @@ name: Test and Check

on:
push:
branches:
- main
branches: [main]
pull_request:
branches: [main]
paths-ignore:
- "README.md"

workflow_dispatch:

jobs:
test:
concurrency:
group: test-${{ github.ref }}
cancel-in-progress: true
runs-on: ubuntu-latest

steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Checkout code
uses: actions/checkout@v2

- name: Install toolchain
id: toolchain
uses: actions-rs/toolchain@master
with:
profile: minimal
toolchain: stable
target: wasm32-unknown-unknown

- name: Install protobuf-compiler
run: sudo apt-get install protobuf-compiler

- name: Rust Cache
uses: Swatinem/rust-cache@v1.3.0

- uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2023-07-16
components: clippy
- name: Test workspace
run: cargo test --package pallet-eth2-light-client

rustfmt:
concurrency:
group: rustfmt-${{ github.ref }}
cancel-in-progress: true
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install latest stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true

- name: Install toolchain
id: toolchain
uses: actions-rs/toolchain@master
with:
profile: minimal
toolchain: nightly-2023-07-16
target: wasm32-unknown-unknown

- name: Install protobuf-compiler
run: sudo apt-get install protobuf-compiler

- name: Rust Cache
uses: Swatinem/rust-cache@v1.3.0

- name: Test workspace
run: cargo test --package pallet-eth2-light-client

- name: Clippy
run: cargo clippy --package pallet-eth2-light-client -- -D warnings
components: rustfmt

- name: Rustfmt check
run: cargo fmt --all -- --check

clippy:
concurrency:
group: clippy-${{ github.ref }}
cancel-in-progress: true
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- name: Install toolchain
id: toolchain
uses: actions-rs/toolchain@master
with:
profile: minimal
toolchain: stable
target: wasm32-unknown-unknown

- name: Rust Cache
uses: Swatinem/rust-cache@v1.3.0

- name: Install Protobuf
run: sudo apt-get install protobuf-compiler

- name: Run clippy
run: cargo clippy --package pallet-eth2-light-client -- -D warnings
146 changes: 71 additions & 75 deletions Cargo.toml

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions crates/bls/src/generic_aggregate_signature.rs
@@ -196,7 +196,7 @@ where
/// Verify that `self` represents an aggregate signature where all `pubkeys` have signed `msg`.
pub fn fast_aggregate_verify(&self, msg: Hash256, pubkeys: &[&GenericPublicKey<Pub>]) -> bool {
if pubkeys.is_empty() {
return false
return false;
}

match self.point.as_ref() {
@@ -227,7 +227,7 @@ where
/// This function only exists for EF tests, it's presently not used in production.
pub fn aggregate_verify(&self, msgs: &[Hash256], pubkeys: &[&GenericPublicKey<Pub>]) -> bool {
if msgs.is_empty() || msgs.len() != pubkeys.len() {
return false
return false;
}

match self.point.as_ref() {
4 changes: 2 additions & 2 deletions crates/bls/src/impls/milagro.rs
@@ -32,7 +32,7 @@ pub fn verify_signature_sets<'a>(
seed: [u8; 32],
) -> bool {
if signature_sets.len() == 0 {
return false
return false;
}

signature_sets
@@ -46,7 +46,7 @@
}

if signature_set.signature.point().is_none() {
return Err(())
return Err(());
}

Ok((signature_set.signature.as_ref(), aggregate, signature_set.message))
10 changes: 6 additions & 4 deletions crates/bls/src/lib.rs
@@ -66,10 +66,12 @@ impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Error::MilagroError(_) => write!(f, "MilagroError"),
Error::InvalidByteLength { got, expected } =>
write!(f, "InvalidByteLength {{ got: {got}, expected: {expected} }}"),
Error::InvalidSecretKeyLength { got, expected } =>
write!(f, "InvalidSecretKeyLength {{ got: {got}, expected: {expected} }}"),
Error::InvalidByteLength { got, expected } => {
write!(f, "InvalidByteLength {{ got: {got}, expected: {expected} }}")
},
Error::InvalidSecretKeyLength { got, expected } => {
write!(f, "InvalidSecretKeyLength {{ got: {got}, expected: {expected} }}")
},
Error::InvalidInfinityPublicKey => write!(f, "InvalidInfinityPublicKey"),
Error::InvalidZeroSecretKey => write!(f, "InvalidZeroSecretKey"),
}
3 changes: 0 additions & 3 deletions crates/bls/src/macros.rs
@@ -1,6 +1,3 @@
#[cfg(feature = "std")]
pub use eth2_serde_utils::hex as hex_encode;

/// Contains the functions required for a `TreeHash` implementation.
///
/// Does not include the `Impl` section since it gets very complicated when it comes to generics.
4 changes: 2 additions & 2 deletions crates/bls/tests/tests.rs
@@ -96,8 +96,8 @@ macro_rules! test_suite {
#[test]
fn partial_eq_infinity_agg_sig_and_real_agg_sig() {
assert!(
AggregateSignature::infinity() !=
AggregateSignatureTester::new_with_single_msg(1).sig
AggregateSignature::infinity()
!= AggregateSignatureTester::new_with_single_msg(1).sig
)
}

2 changes: 1 addition & 1 deletion crates/consensus-types/Cargo.toml
@@ -16,7 +16,7 @@ eth2-serde-utils = { package = "webb-eth2-serde-utils", path = "../serde-utils",

bitvec = { workspace = true, features = ["atomic", "alloc"] }
hex = { workspace = true }
codec = { workspace = true }
codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false }
scale-info = { workspace = true }
serde = { workspace = true }
rlp = { workspace = true }
6 changes: 3 additions & 3 deletions crates/consensus-types/src/lib.rs
@@ -49,7 +49,7 @@ pub fn compute_fork_version(
fork_version: ForkVersion,
) -> Option<ForkVersion> {
if epoch >= bellatrix_epoch {
return Some(fork_version)
return Some(fork_version);
}

None
@@ -66,7 +66,7 @@ pub fn compute_fork_version_by_slot(
// Compute floor of log2 of a u32.
pub const fn floorlog2(x: u32) -> u32 {
if x == 0 {
return 0
return 0;
}
31 - x.leading_zeros()
}
@@ -119,7 +119,7 @@ pub fn convert_branch(branch: &[H256]) -> Vec<ethereum_types::H256> {
pub fn validate_beacon_block_header_update(header_update: &HeaderUpdate) -> bool {
let branch = convert_branch(&header_update.execution_hash_branch);
if branch.len() != EXECUTION_PROOF_SIZE {
return false
return false;
}

let l2_proof = &branch[0..L2_EXECUTION_PAYLOAD_PROOF_SIZE];
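A note on the floorlog2 helper shown in the hunk above (only its formatting changed in this PR): for non-zero x, 31 - x.leading_zeros() is the index of the highest set bit of a u32, i.e. floor(log2(x)), and the zero case is pinned to 0. A minimal sketch with illustrative assertions that are not part of the PR:

// Sketch of the floorlog2 shown above; the assertions are illustrative only.
pub const fn floorlog2(x: u32) -> u32 {
    if x == 0 {
        return 0;
    }
    31 - x.leading_zeros()
}

fn main() {
    assert_eq!(floorlog2(1), 0);
    assert_eq!(floorlog2(2), 1);
    assert_eq!(floorlog2(10), 3); // 2^3 = 8 <= 10 < 16 = 2^4
    assert_eq!(floorlog2(u32::MAX), 31);
    assert_eq!(floorlog2(0), 0); // sentinel for the undefined log2(0)
}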
4 changes: 2 additions & 2 deletions crates/consensus-types/src/network_config.rs
@@ -74,11 +74,11 @@ impl NetworkConfig {

pub fn compute_fork_version(&self, epoch: Epoch) -> Option<ForkVersion> {
if epoch >= self.capella_fork_epoch {
return Some(self.capella_fork_version)
return Some(self.capella_fork_version);
}

if epoch >= self.bellatrix_fork_epoch {
return Some(self.bellatrix_fork_version)
return Some(self.bellatrix_fork_version);
}

None
15 changes: 8 additions & 7 deletions crates/eth-rpc-client/src/beacon_rpc_client.rs
@@ -249,8 +249,9 @@ impl BeaconRPCClient {
let light_client_snapshot_json_str = self.get_json_from_raw_request(&url).await?;
let parsed_json: Value = serde_json::from_str(&light_client_snapshot_json_str)?;
let beacon_header: BeaconBlockHeader = match self.routes.version {
BeaconRPCVersion::V1_5 =>
serde_json::from_value(parsed_json["data"]["header"]["beacon"].clone())?,
BeaconRPCVersion::V1_5 => {
serde_json::from_value(parsed_json["data"]["header"]["beacon"].clone())?
},
_ => serde_json::from_value(parsed_json["data"]["header"].clone())?,
};

@@ -358,7 +359,7 @@ impl BeaconRPCClient {
trace!(target: "relay", "Beacon chain request: {}", url);
let json_str = client.get(url).await?;
if serde_json::from_str::<Value>(&json_str).is_err() {
return Err(FailOnGettingJson { response: json_str }.into())
return Err(FailOnGettingJson { response: json_str }.into());
}

Ok(json_str)
@@ -453,13 +454,13 @@ impl BeaconRPCClient {
.sync_committee_signature
) == serde_json::to_string(&sync_aggregate.sync_committee_signature)?
{
break
break;
}
}

signature_slot += 1;
if signature_slot - attested_header.slot > CHECK_SLOTS_FORWARD_LIMIT {
return Err(SignatureSlotNotFoundError.into())
return Err(SignatureSlotNotFoundError.into());
}
}

@@ -584,7 +585,7 @@ impl BeaconRPCClient {
if parse_json.is_object() {
if let Some(msg_str) = parse_json["message"].as_str() {
if msg_str.contains("No block found for") {
return Err(NoBlockForSlotError.into())
return Err(NoBlockForSlotError.into());
}
}
}
@@ -606,7 +607,7 @@ mod tests {
const TIMEOUT_STATE_SECONDS: u64 = 1000;

fn get_test_config() -> ConfigForTests {
ConfigForTests::load_from_toml("config_for_tests.toml".try_into().unwrap())
ConfigForTests::load_from_toml("config_for_tests.toml".into())
}

#[test]
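For context on the config_for_tests.toml changes in the test modules above and below: converting a &str to the loader's path or string argument is infallible, so the blanket TryFrom impl (with Error = Infallible) makes try_into().unwrap() a needless detour, and .into() expresses the same conversion directly. A minimal sketch assuming a PathBuf-style parameter; load_from_toml here is a hypothetical stand-in, since the real signature lives in the crate and is not shown in this diff:

use std::path::PathBuf;

// Hypothetical stand-in for the crate's loader; real signature not shown in this diff.
fn load_from_toml(path: PathBuf) -> String {
    format!("would read {}", path.display())
}

fn main() {
    // Old style: infallible conversion routed through TryInto, then unwrapped.
    let old = load_from_toml("config_for_tests.toml".try_into().unwrap());
    // New style in this PR: the same conversion, expressed directly via Into.
    let new = load_from_toml("config_for_tests.toml".into());
    assert_eq!(old, new);
}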
4 changes: 2 additions & 2 deletions crates/eth-rpc-client/src/eth1_rpc_client.rs
@@ -64,7 +64,7 @@ impl Eth1RPCClient {
let val: Value = serde_json::from_str(&res)?;
let is_sync = val["result"].as_bool();
if let Some(is_sync_val) = is_sync {
return Ok(is_sync_val)
return Ok(is_sync_val);
}

Ok(true)
@@ -76,7 +76,7 @@ mod tests {
use crate::{config_for_tests::ConfigForTests, eth1_rpc_client::Eth1RPCClient};

fn get_test_config() -> ConfigForTests {
ConfigForTests::load_from_toml("config_for_tests.toml".try_into().unwrap())
ConfigForTests::load_from_toml("config_for_tests.toml".into())
}

#[tokio::test]
4 changes: 2 additions & 2 deletions crates/eth-rpc-client/src/execution_block_proof.rs
@@ -107,7 +107,7 @@ impl ExecutionBlockProof {
index: usize,
) -> Result<H256, IncorrectBranchLength> {
if branch.len() != depth {
return Err(IncorrectBranchLength)
return Err(IncorrectBranchLength);
}

let mut merkle_root = leaf.as_bytes().to_vec();
@@ -148,7 +148,7 @@ mod tests {
const TIMEOUT_STATE_SECONDS: u64 = 1000;

fn get_test_config() -> ConfigForTests {
ConfigForTests::load_from_toml("config_for_tests.toml".try_into().unwrap())
ConfigForTests::load_from_toml("config_for_tests.toml".into())
}

#[test]
@@ -116,12 +116,12 @@ impl HandMadeFinalityLightClientUpdate {
sync_committee_bits.into_iter().map(|x| x.count_ones()).sum();
if sync_committee_bits_sum * 3 < (64 * 8 * 2) {
current_attested_slot = signature_slot;
continue
continue;
}

if signature_beacon_body.attestations().is_empty() {
current_attested_slot = signature_slot;
continue
continue;
}

let mut attested_slots: Vec<u64> = signature_beacon_body
@@ -132,8 +132,8 @@
attested_slots.sort();

for i in (0..attested_slots.len()).rev() {
if (i == attested_slots.len() - 1 || attested_slots[i + 1] != attested_slots[i]) &&
attested_slots[i] >= attested_slot
if (i == attested_slots.len() - 1 || attested_slots[i + 1] != attested_slots[i])
&& attested_slots[i] >= attested_slot
{
current_attested_slot = attested_slots[i];

@@ -142,10 +142,10 @@
.await
{
if err.downcast_ref::<NoBlockForSlotError>().is_none() {
return Err(err)
return Err(err);
}
} else {
return Ok((current_attested_slot, signature_slot))
return Ok((current_attested_slot, signature_slot));
}
}
}
@@ -334,7 +334,7 @@ mod tests {
const TIMEOUT_STATE_SECONDS: u64 = 1000000;

fn get_test_config() -> ConfigForTests {
ConfigForTests::load_from_toml("config_for_tests.toml".try_into().unwrap())
ConfigForTests::load_from_toml("config_for_tests.toml".into())
}

fn cmp_light_client_updates(
4 changes: 2 additions & 2 deletions crates/eth-rpc-client/src/utils.rs
@@ -1,7 +1,7 @@
pub fn trim_quotes(s: String) -> String {
let mut res_str = s;
if (res_str.starts_with('"') && res_str.ends_with('"')) ||
(res_str.starts_with('\'') && res_str.ends_with('\''))
if (res_str.starts_with('"') && res_str.ends_with('"'))
|| (res_str.starts_with('\'') && res_str.ends_with('\''))
{
res_str.pop();
res_str.remove(0);
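For reference, the reformatted helper above as a self-contained snippet with a couple of illustrative assertions that are not part of the PR: it strips one matching pair of surrounding double or single quotes and leaves everything else untouched. The function's tail is truncated in the diff, so the final return is assumed here:

// Same logic as the diff above, reproduced for illustration; the trailing return is assumed.
pub fn trim_quotes(s: String) -> String {
    let mut res_str = s;
    if (res_str.starts_with('"') && res_str.ends_with('"'))
        || (res_str.starts_with('\'') && res_str.ends_with('\''))
    {
        res_str.pop();
        res_str.remove(0);
    }
    res_str
}

fn main() {
    assert_eq!(trim_quotes("\"0xdeadbeef\"".to_string()), "0xdeadbeef");
    assert_eq!(trim_quotes("'quoted'".to_string()), "quoted");
    assert_eq!(trim_quotes("unquoted".to_string()), "unquoted");
}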
2 changes: 1 addition & 1 deletion crates/eth-types/Cargo.toml
@@ -12,7 +12,7 @@ tree-hash = { package = "webb-tree-hash", path = "../tree-hash", default-featur
tree-hash-derive = { package = "webb-tree-hash-derive", path = "../tree-hash-derive", default-features = false }
eth2-serde-utils = { package = "webb-eth2-serde-utils", path = "../serde-utils", default-features = false, optional = true }
hex = { workspace = true }
codec = { workspace = true }
codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false }
scale-info = { workspace = true }
serde = { workspace = true }
rlp = { workspace = true }
2 changes: 1 addition & 1 deletion crates/eth-types/src/lib.rs
@@ -250,7 +250,7 @@ impl RlpDecodable for BlockHeader {
);

if block_header.hash.unwrap() != keccak256(serialized.as_raw()).into() {
return Err(RlpDecoderError::RlpInconsistentLengthAndData)
return Err(RlpDecoderError::RlpInconsistentLengthAndData);
}

block_header.partial_hash = Some(