diff --git a/.github/workflows/build-test-windows.yml b/.github/workflows/build-test-windows.yml index a5625d353b..ca6ce25704 100644 --- a/.github/workflows/build-test-windows.yml +++ b/.github/workflows/build-test-windows.yml @@ -13,7 +13,7 @@ jobs: platform: - windows2019 # custom runner toolchain: - - 1.52.1 + - latest runs-on: ${{ matrix.platform }} steps: - name: Checkout sources diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index d097eb6833..6f0cfa2315 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -15,7 +15,7 @@ jobs: - ubuntu-16.04 - macos-latest toolchain: - - 1.52.1 + - latest runs-on: ${{ matrix.platform }} steps: - name: Checkout sources diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 305682afa7..f30ff67e5d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -20,7 +20,7 @@ jobs: - ubuntu-16.04 - macos-latest toolchain: - - 1.52.1 + - latest runs-on: ${{ matrix.platform }} steps: - name: Checkout sources diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index 90c7f474c7..401bd0c0ce 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -9,16 +9,16 @@ on: jobs: check: name: Check - runs-on: ubuntu-16.04 + runs-on: ubuntu-20.04 steps: - name: Checkout sources uses: actions/checkout@main with: submodules: true - - name: Install 1.52.1 toolchain + - name: Install 1.59 toolchain uses: actions-rs/toolchain@v1 with: - toolchain: 1.52.1 + toolchain: 1.59 profile: minimal override: true - name: Run cargo check 1/3 diff --git a/.github/workflows/deploy-docker-nightly.yml b/.github/workflows/deploy-docker-nightly.yml index a0eb3db7c1..372570f7d0 100644 --- a/.github/workflows/deploy-docker-nightly.yml +++ b/.github/workflows/deploy-docker-nightly.yml @@ -16,7 +16,7 @@ jobs: - name: Install toolchain uses: actions-rs/toolchain@v1 with: - toolchain: 1.52.1 + toolchain: latest profile: minimal override: true - name: Deploy to docker hub diff --git a/.github/workflows/deploy-docker-tag.yml b/.github/workflows/deploy-docker-tag.yml index 4114d5e71f..d14dfdbb27 100644 --- a/.github/workflows/deploy-docker-tag.yml +++ b/.github/workflows/deploy-docker-tag.yml @@ -17,7 +17,7 @@ jobs: - name: Install toolchain uses: actions-rs/toolchain@v1 with: - toolchain: 1.52.1 + toolchain: latest profile: minimal override: true - name: Deploy to docker hub diff --git a/.github/workflows/fmt.yml b/.github/workflows/fmt.yml index 6600911ed0..d7ef4a52c2 100644 --- a/.github/workflows/fmt.yml +++ b/.github/workflows/fmt.yml @@ -5,13 +5,13 @@ name: rustfmt jobs: fmt: name: Rustfmt - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.52.1 + toolchain: 1.59 override: true - run: rustup component add rustfmt - uses: actions-rs/cargo@v1 diff --git a/Cargo.lock b/Cargo.lock index 233ae1b112..640df6626f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -203,6 +203,12 @@ dependencies = [ "byteorder", ] +[[package]] +name = "beef" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bed554bd50246729a1ec158d08aa3235d1b69d94ad120ebe187e28894787e736" + [[package]] name = "bincode" version = "1.3.2" @@ -851,7 +857,7 @@ dependencies = [ "itertools 0.7.11", "keccak-hash", "lazy_static", - "lunarity-lexer", + "logos", "regex 1.3.9", "rustc-hex 2.1.0", "serde", @@ -1734,15 +1740,6 @@ dependencies = [ "num_cpus", ] 
-[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - [[package]] name = "gcc" version = "0.3.55" @@ -2674,24 +2671,25 @@ dependencies = [ [[package]] name = "logos" -version = "0.7.7" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60ca690691528b32832c7e8aaae8ae1edcdee4e9ffde55b2d31a4795bc7a12d0" +checksum = "427e2abca5be13136da9afdbf874e6b34ad9001dd70f2b103b083a85daa7b345" dependencies = [ "logos-derive", - "toolshed", ] [[package]] name = "logos-derive" -version = "0.7.7" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "917dccdd529d5681f3d28b26bcfdafd2ed67fe4f26d15b5ac679f67b55279f3d" +checksum = "56a7d287fd2ac3f75b11f19a1c8a874a7d55744bd91f7a1b3e7cf87d4343c36d" dependencies = [ - "proc-macro2 0.4.30", - "quote 0.6.13", + "beef", + "fnv", + "proc-macro2 1.0.20", + "quote 1.0.7", "regex-syntax 0.6.18", - "syn 0.15.26", + "syn 1.0.40", "utf8-ranges", ] @@ -2713,15 +2711,6 @@ dependencies = [ "linked-hash-map", ] -[[package]] -name = "lunarity-lexer" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28a5446c03ed5bd4ae2cca322c4c84d9bd9741b6788f75c404719474cb63d3b7" -dependencies = [ - "logos", -] - [[package]] name = "macros" version = "0.1.0" @@ -5259,15 +5248,6 @@ dependencies = [ "serde", ] -[[package]] -name = "toolshed" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a272adbf14cfbb486774d09ee3e00c38d488cd390084a528f70e10e3a184a8" -dependencies = [ - "fxhash", -] - [[package]] name = "trace-time" version = "0.1.2" diff --git a/bin/oe/cli/usage.rs b/bin/oe/cli/usage.rs index 9a8f8cc9b0..fe6dfb3acc 100644 --- a/bin/oe/cli/usage.rs +++ b/bin/oe/cli/usage.rs @@ -18,9 +18,7 @@ macro_rules! return_if_parse_error { ($e:expr) => { match $e { Err( - clap_error - @ - ClapError { + clap_error @ ClapError { kind: ClapErrorKind::ValueValidation, .. }, diff --git a/bin/oe/informant.rs b/bin/oe/informant.rs index 6014e2b019..ae205f6fc5 100644 --- a/bin/oe/informant.rs +++ b/bin/oe/informant.rs @@ -354,9 +354,7 @@ impl ChainNotify for Informant { if new_blocks.has_more_blocks_to_import { return; } - info!(target: "import", "Informant: trying to get lock."); let mut last_import = self.last_import.lock(); - info!(target: "import", "Informant: got lock."); let client = &self.target.client; let importing = self.target.is_major_importing(); diff --git a/crates/concensus/miner/src/local_accounts.rs b/crates/concensus/miner/src/local_accounts.rs index b562459c7a..5d172cd6fd 100644 --- a/crates/concensus/miner/src/local_accounts.rs +++ b/crates/concensus/miner/src/local_accounts.rs @@ -23,7 +23,7 @@ use ethereum_types::Address; /// Local accounts checker pub trait LocalAccounts: Send + Sync { /// Returns true if given address should be considered local account. - fn is_local(&self, &Address) -> bool; + fn is_local(&self, address: &Address) -> bool; } impl LocalAccounts for HashSet
{ diff --git a/crates/ethcore/src/engines/hbbft/hbbft_engine.rs b/crates/ethcore/src/engines/hbbft/hbbft_engine.rs index 472e3f6ef1..9b2a533342 100644 --- a/crates/ethcore/src/engines/hbbft/hbbft_engine.rs +++ b/crates/ethcore/src/engines/hbbft/hbbft_engine.rs @@ -48,7 +48,7 @@ use engines::hbbft::{ contracts::validator_set::{ get_validator_available_since, send_tx_announce_availability, staking_by_mining_address, }, - hbbft_message_memorium::HbbftMessageMemorium, + hbbft_message_memorium::HbbftMessageDispatcher, }; use std::{ops::Deref, sync::atomic::Ordering}; @@ -70,7 +70,7 @@ pub struct HoneyBadgerBFT { signer: Arc>>>, machine: EthereumMachine, hbbft_state: RwLock, - hbbft_message_memorial: RwLock, + hbbft_message_dispatcher: RwLock, sealing: RwLock>, params: HbbftParams, message_counter: RwLock, @@ -210,7 +210,7 @@ impl HoneyBadgerBFT { signer: Arc::new(RwLock::new(None)), machine, hbbft_state: RwLock::new(HbbftState::new()), - hbbft_message_memorial: RwLock::new(HbbftMessageMemorium::new()), + hbbft_message_dispatcher: RwLock::new(HbbftMessageDispatcher::new()), sealing: RwLock::new(BTreeMap::new()), params, message_counter: RwLock::new(0), @@ -332,7 +332,7 @@ impl HoneyBadgerBFT { trace!(target: "consensus", "Received message of idx {} {:?} from {}", msg_idx, message, sender_id); // store received messages here. - self.hbbft_message_memorial + self.hbbft_message_dispatcher .write() .on_message_received(&message); @@ -357,7 +357,7 @@ impl HoneyBadgerBFT { block_num: BlockNumber, ) -> Result<(), EngineError> { // store received messages here. - self.hbbft_message_memorial + self.hbbft_message_dispatcher .write() .on_sealing_message_received(&message, block_num); @@ -966,7 +966,7 @@ impl Engine for HoneyBadgerBFT { } } - self.hbbft_message_memorial + self.hbbft_message_dispatcher .write() .free_memory(block.header.number()); diff --git a/crates/ethcore/src/engines/hbbft/hbbft_message_memorium.rs b/crates/ethcore/src/engines/hbbft/hbbft_message_memorium.rs index 09fcba980a..f9e0630506 100644 --- a/crates/ethcore/src/engines/hbbft/hbbft_message_memorium.rs +++ b/crates/ethcore/src/engines/hbbft/hbbft_message_memorium.rs @@ -1,5 +1,7 @@ //use hbbft::honey_badger::{self, MessageContent}; use hbbft::honey_badger::{self}; +use parking_lot::RwLock; +use std::collections::VecDeque; // use threshold_crypto::{SignatureShare}; use engines::hbbft::{sealing, NodeId}; @@ -42,10 +44,62 @@ pub(crate) struct HbbftMessageMemorium { // */ // agreements: BTreeMap>, message_tracking_id: u64, - config_blocks_to_keep_on_disk: u64, - last_block_deleted_from_disk: u64, + dispatched_messages: VecDeque, + dispatched_seals: VecDeque<(sealing::Message, u64)>, +} + +pub(crate) struct HbbftMessageDispatcher { + thread: Option>, + memorial: std::sync::Arc>, +} + +impl HbbftMessageDispatcher { + pub fn new() -> Self { + HbbftMessageDispatcher { + thread: None, + memorial: std::sync::Arc::new(RwLock::new(HbbftMessageMemorium::new())), + } + } + + pub fn on_sealing_message_received(&mut self, message: &sealing::Message, epoch: u64) { + self.memorial + .write() + .dispatched_seals + .push_back((message.clone(), epoch)); + + self.ensure_worker_thread(); + } + + pub fn on_message_received(&mut self, message: &HbMessage) { + //performance: dispatcher pattern + multithreading could improve performance a lot. 
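// A minimal, self-contained sketch of the queue-plus-worker "dispatcher pattern"
// the comment above refers to: callers only push work into a shared queue, and a
// lazily spawned background thread drains it, sleeping briefly when idle. The
// names below are illustrative, not the engine's real types; `parking_lot` is
// assumed, as in the surrounding code.
use parking_lot::RwLock;
use std::{collections::VecDeque, sync::Arc, thread, time::Duration};

struct Dispatcher {
    worker: Option<thread::JoinHandle<()>>,
    queue: Arc<RwLock<VecDeque<String>>>,
}

impl Dispatcher {
    fn new() -> Self {
        Dispatcher {
            worker: None,
            queue: Arc::new(RwLock::new(VecDeque::new())),
        }
    }

    // Cheap for the caller: enqueue the item and make sure a worker is running.
    fn dispatch(&mut self, item: String) {
        self.queue.write().push_back(item);
        self.ensure_worker();
    }

    fn ensure_worker(&mut self) {
        if self.worker.is_none() {
            let queue = self.queue.clone();
            self.worker = Some(thread::spawn(move || loop {
                // Hold the write lock only long enough to pop a single item.
                let next = queue.write().pop_front();
                match next {
                    Some(item) => println!("processed {}", item),
                    None => thread::sleep(Duration::from_millis(250)),
                }
            }));
        }
    }
}

fn main() {
    let mut dispatcher = Dispatcher::new();
    dispatcher.dispatch("message".to_owned());
    thread::sleep(Duration::from_millis(500)); // give the worker time to drain the queue
}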
+ + self.memorial + .write() + .dispatched_messages + .push_back(message.clone()); + + self.ensure_worker_thread(); + } + + fn ensure_worker_thread(&mut self) { + if self.thread.is_none() { + // let mut memo = self; + // let mut arc = std::sync::Arc::new(&self); + let arc_clone = self.memorial.clone(); + self.thread = Some(std::thread::spawn(move || loop { + let work_result = arc_clone.write().work_message(); + if !work_result { + std::thread::sleep(std::time::Duration::from_millis(250)); + } + })); + } + } + + pub fn free_memory(&mut self, _current_block: u64) { + // TODO: make memorium freeing memory of ancient block. + } } impl HbbftMessageMemorium { @@ -57,10 +111,12 @@ impl HbbftMessageMemorium { message_tracking_id: 0, config_blocks_to_keep_on_disk: 200, last_block_deleted_from_disk: 0, + dispatched_messages: VecDeque::new(), + dispatched_seals: VecDeque::new(), } } - pub fn on_message_string_received(&mut self, message_json: String, epoch: u64) { + fn on_message_string_received(&mut self, message_json: String, epoch: u64) { self.message_tracking_id += 1; //don't pick up messages if we do not keep any. @@ -130,29 +186,48 @@ impl HbbftMessageMemorium { } } - pub fn on_message_received(&mut self, message: &HbMessage) { - //performance: dispatcher pattern + multithreading could improve performance a lot. - - let epoch = message.epoch(); + fn work_message(&mut self) -> bool { + // warn!(target: "consensus", "working on hbbft messages: {} consensuns: {}", self.dispatched_messages.len(), self.dispatched_seals.len()); - match serde_json::to_string(message) { - Ok(json_string) => { - // debug!(target: "consensus", "{}", json_string); - self.on_message_string_received(json_string, epoch); + if let Some(message) = self.dispatched_messages.pop_front() { + let epoch = message.epoch(); + match serde_json::to_string(&message) { + Ok(json_string) => { + // debug!(target: "consensus", "{}", json_string); + self.on_message_string_received(json_string, epoch); + } + Err(e) => { + // being unable to interprete a message, could result in consequences + // not being able to report missbehavior, + // or reporting missbehavior, where there was not a missbehavior. + error!(target: "consensus", "could not store hbbft message: {:?}", e); + } } - Err(e) => { - error!(target: "consensus", "could not create json: {:?}", e); + return true; + } + + if let Some(seal) = self.dispatched_seals.pop_front() { + match serde_json::to_string(&seal.0) { + Ok(json_string) => { + self.on_message_string_received(json_string, seal.1); + } + Err(e) => { + // being unable to interprete a message, could result in consequences + // not being able to report missbehavior, + // or reporting missbehavior, where there was not a missbehavior. 
+ error!(target: "consensus", "could not store seal message: {:?}", e); + } } + return true; } - // let content = message.content(); + return false; + // let content = message.content(); //match content { // MessageContent::Subset(subset) => {} - // MessageContent::DecryptionShare { proposer_id, share } => { // debug!("got decryption share from {} {:?}", proposer_id, share); - // if !self.decryption_shares.contains_key(&epoch) { // match self.decryption_shares.insert(epoch, Vec::new()) { // None => {} @@ -165,21 +240,6 @@ impl HbbftMessageMemorium { //} } - pub fn on_sealing_message_received(&mut self, message: &sealing::Message, epoch: u64) { - match serde_json::to_string(message) { - Ok(json_string) => { - // debug!(target: "consensus", "{}", json_string); - - self.on_message_string_received(json_string, epoch); - } - Err(e) => { - error!(target: "consensus", "could not create json: {:?}", e); - } - } - - // todo: also remember sealing messages in an organized way - } - pub fn free_memory(&mut self, _current_block: u64) { // self.signature_shares.remove(&epoch); } diff --git a/crates/ethcore/src/executive.rs b/crates/ethcore/src/executive.rs index 4fe13d27a2..c2a4702c4e 100644 --- a/crates/ethcore/src/executive.rs +++ b/crates/ethcore/src/executive.rs @@ -64,16 +64,16 @@ pub fn contract_address( let code_hash = keccak(code); let mut buffer = [0u8; 1 + 20 + 32 + 32]; buffer[0] = 0xff; - &mut buffer[1..(1 + 20)].copy_from_slice(&sender[..]); - &mut buffer[(1 + 20)..(1 + 20 + 32)].copy_from_slice(&salt[..]); - &mut buffer[(1 + 20 + 32)..].copy_from_slice(&code_hash[..]); + let _ = &mut buffer[1..(1 + 20)].copy_from_slice(&sender[..]); + let _ = &mut buffer[(1 + 20)..(1 + 20 + 32)].copy_from_slice(&salt[..]); + let _ = &mut buffer[(1 + 20 + 32)..].copy_from_slice(&code_hash[..]); (From::from(keccak(&buffer[..])), Some(code_hash)) } CreateContractAddress::FromSenderAndCodeHash => { let code_hash = keccak(code); let mut buffer = [0u8; 20 + 32]; - &mut buffer[..20].copy_from_slice(&sender[..]); - &mut buffer[20..].copy_from_slice(&code_hash[..]); + let _ = &mut buffer[..20].copy_from_slice(&sender[..]); + let _ = &mut buffer[20..].copy_from_slice(&code_hash[..]); (From::from(keccak(&buffer[..])), Some(code_hash)) } } diff --git a/crates/ethcore/sync/src/block_sync.rs b/crates/ethcore/sync/src/block_sync.rs index f76ec0be50..7b8f4c9f6b 100644 --- a/crates/ethcore/sync/src/block_sync.rs +++ b/crates/ethcore/sync/src/block_sync.rs @@ -47,7 +47,7 @@ const MAX_USELESS_HEADERS_PER_ROUND: usize = 3; // logging macros prepend BlockSet context for log filtering macro_rules! trace_sync { ($self:ident, $fmt:expr, $($arg:tt)+) => { - trace!(target: "sync", concat!("{:?}: ", $fmt), $self.block_set, $($arg)+); + trace!(target: "sync", concat!("{:?}: ", $fmt), $self.block_set, $($arg)+) }; ($self:ident, $fmt:expr) => { trace!(target: "sync", concat!("{:?}: ", $fmt), $self.block_set); diff --git a/crates/ethcore/sync/src/chain/mod.rs b/crates/ethcore/sync/src/chain/mod.rs index e4545646fd..e991aff73c 100644 --- a/crates/ethcore/sync/src/chain/mod.rs +++ b/crates/ethcore/sync/src/chain/mod.rs @@ -1202,7 +1202,7 @@ impl ChainSync { match session_info { Some(s) => { warn!(target: "sync", "disabling peer {} {} originated by us: {}. 
client_version: {}, protocol version: {}", - peer_id, s.remote_address, s.originated, s.client_version, s.protocol_version); + peer_id, s.remote_address, s.originated, s.client_version, s.protocol_version); io.disable_peer(peer_id); self.deactivate_peer(io, peer_id); return; diff --git a/crates/net/network-devp2p/src/connection.rs b/crates/net/network-devp2p/src/connection.rs index 16f22b77e2..27211d9000 100644 --- a/crates/net/network-devp2p/src/connection.rs +++ b/crates/net/network-devp2p/src/connection.rs @@ -434,7 +434,7 @@ impl EncryptedConnection { let mut packet = vec![0u8; 16 + 16 + len + padding + 16]; let mut header = header.out(); header.resize(HEADER_LEN, 0u8); - &mut packet[..HEADER_LEN].copy_from_slice(&mut header); + let _ = &mut packet[..HEADER_LEN].copy_from_slice(&mut header); self.encoder.encrypt(&mut packet[..HEADER_LEN])?; EncryptedConnection::update_mac( &mut self.egress_mac, @@ -444,7 +444,7 @@ impl EncryptedConnection { self.egress_mac .clone() .finalize(&mut packet[HEADER_LEN..32]); - &mut packet[32..32 + len].copy_from_slice(payload); + let _ = &mut packet[32..32 + len].copy_from_slice(payload); self.encoder.encrypt(&mut packet[32..32 + len])?; if padding != 0 { self.encoder @@ -526,7 +526,7 @@ impl EncryptedConnection { let mut prev = H128::default(); mac.clone().finalize(prev.as_bytes_mut()); let mut enc = H128::default(); - &mut enc[..].copy_from_slice(prev.as_bytes()); + let _ = &mut enc[..].copy_from_slice(prev.as_bytes()); let mac_encoder = AesEcb256::new(mac_encoder_key.as_bytes())?; mac_encoder.encrypt(enc.as_bytes_mut())?; diff --git a/crates/rpc/src/v1/helpers/dispatch/mod.rs b/crates/rpc/src/v1/helpers/dispatch/mod.rs index 3d64e4906f..fbbebc5d81 100644 --- a/crates/rpc/src/v1/helpers/dispatch/mod.rs +++ b/crates/rpc/src/v1/helpers/dispatch/mod.rs @@ -135,7 +135,7 @@ pub trait Dispatcher: Send + Sync + Clone { ::Future: Send; /// Converts a `SignedTransaction` into `RichRawTransaction` - fn enrich(&self, SignedTransaction) -> RpcRichRawTransaction; + fn enrich(&self, signed_transaction: SignedTransaction) -> RpcRichRawTransaction; /// "Dispatch" a local transaction. fn dispatch_transaction(&self, signed_transaction: PendingTransaction) -> Result; diff --git a/crates/util/EIP-712/Cargo.toml b/crates/util/EIP-712/Cargo.toml index 48428a0b7c..2a2d0f8ae6 100644 --- a/crates/util/EIP-712/Cargo.toml +++ b/crates/util/EIP-712/Cargo.toml @@ -17,12 +17,12 @@ serde_json = "1.0" ethabi = "12.0.0" keccak-hash = "0.5.0" ethereum-types = "0.9.2" +logos = "0.12.0" failure = "0.1.7" itertools = "0.7" lazy_static = "1.1" regex = "1.0" validator = "0.8" validator_derive = "0.8" -lunarity-lexer = "0.2" rustc-hex = "2.0" indexmap = "1.0.2" diff --git a/crates/util/EIP-712/src/parser.rs b/crates/util/EIP-712/src/parser.rs index a5700558d9..7e19e016e9 100644 --- a/crates/util/EIP-712/src/parser.rs +++ b/crates/util/EIP-712/src/parser.rs @@ -16,7 +16,7 @@ //! 
Solidity type-name parsing use crate::error::*; -use lunarity_lexer::{Lexer, Token}; +use logos::{Lexer, Logos}; use std::{fmt, result}; #[derive(Debug, Clone, PartialEq)] @@ -35,6 +35,59 @@ pub enum Type { }, } +#[derive(Logos, Debug, Clone, Copy, PartialEq)] +pub enum Token { + #[token("bool")] + TypeBool, + + #[token("address")] + TypeAddress, + + #[token("string")] + TypeString, + + #[regex("byte|bytes[1-2][0-9]?|bytes3[0-2]?|bytes[4-9]", validate_bytes)] + TypeByte(u8), + + #[token("bytes")] + TypeBytes, + + #[regex("int(8|16|24|32|40|48|56|64|72|80|88|96|104|112|120|128|136|144)")] + #[regex("int(152|160|168|176|184|192|200|208|216|224|232|240|248|256)")] + #[token("int")] + TypeInt, + + #[regex("uint(8|16|24|32|40|48|56|64|72|80|88|96|104|112|120|128|136|144)")] + #[regex("uint(152|160|168|176|184|192|200|208|216|224|232|240|248|256)")] + #[token("uint")] + TypeUint, + + #[token("[]")] + Array, + + #[regex("[a-zA-Z_$][a-zA-Z0-9_$]*")] + Identifier, + + #[regex("\\[[0-9]+\\]", |lex| lex.slice()[1..lex.slice().len()-1].parse::().ok() )] + SizedArray(u64), + + #[error] + Error, +} + +fn validate_bytes(lex: &mut Lexer) -> Option { + let slice = lex.slice().as_bytes(); + + if slice.len() > 5 { + if let Some(byte) = slice.get(6) { + return Some((slice[5] - b'0') * 10 + (byte - b'0')); + } + return Some(slice[5] - b'0'); + } else { + return Some(1); + } +} + impl From for String { fn from(field_type: Type) -> String { match field_type { @@ -66,74 +119,49 @@ impl fmt::Display for Type { /// the type string is being validated before it's parsed. pub fn parse_type(field_type: &str) -> Result { - #[derive(PartialEq)] - enum State { - Open, - Close, - } + let mut lex = Token::lexer(field_type); - let mut lexer = Lexer::new(field_type); let mut token = None; - let mut state = State::Close; let mut array_depth = 0; - let mut current_array_length: Option = None; - while lexer.token != Token::EndOfProgram { - let type_ = match lexer.token { - Token::Identifier => Type::Custom(lexer.slice().to_owned()), - Token::TypeByte => Type::Byte(lexer.extras.0), + while let Some(current_token) = lex.next() { + let type_ = match current_token { + Token::Identifier => Type::Custom(lex.slice().to_owned()), + Token::TypeByte(len) => Type::Byte(len), Token::TypeBytes => Type::Bytes, Token::TypeBool => Type::Bool, Token::TypeUint => Type::Uint, Token::TypeInt => Type::Int, Token::TypeString => Type::String, Token::TypeAddress => Type::Address, - Token::LiteralInteger => { - let length = lexer.slice(); - current_array_length = Some( - length - .parse() - .map_err(|_| ErrorKind::InvalidArraySize(length.into()))?, - ); - lexer.advance(); - continue; + Token::Array | Token::SizedArray(_) if array_depth == 10 => { + return Err(ErrorKind::UnsupportedArrayDepth)?; } - Token::BracketOpen if token.is_some() && state == State::Close => { - state = State::Open; - lexer.advance(); + Token::SizedArray(len) => { + token = Some(Type::Array { + inner: Box::new(token.expect("if statement checks for some; qed")), + length: Some(len), + }); + array_depth += 1; continue; } - Token::BracketClose if array_depth < 10 => { - if state == State::Open && token.is_some() { - let length = current_array_length.take(); - state = State::Close; - token = Some(Type::Array { - inner: Box::new(token.expect("if statement checks for some; qed")), - length, - }); - lexer.advance(); - array_depth += 1; - continue; - } else { - return Err(ErrorKind::UnexpectedToken( - lexer.slice().to_owned(), - field_type.to_owned(), - ))?; - } - } - Token::BracketClose 
if array_depth == 10 => { - return Err(ErrorKind::UnsupportedArrayDepth)?; + Token::Array => { + token = Some(Type::Array { + inner: Box::new(token.expect("if statement checks for some; qed")), + length: None, + }); + array_depth += 1; + continue; } - _ => { + Token::Error => { return Err(ErrorKind::UnexpectedToken( - lexer.slice().to_owned(), + lex.slice().to_owned(), field_type.to_owned(), - ))? + ))?; } }; token = Some(type_); - lexer.advance(); } Ok(token.ok_or(ErrorKind::NonExistentType)?) diff --git a/crates/vm/evm/src/interpreter/memory.rs b/crates/vm/evm/src/interpreter/memory.rs index 22d5a4df5c..36d5e2fabb 100644 --- a/crates/vm/evm/src/interpreter/memory.rs +++ b/crates/vm/evm/src/interpreter/memory.rs @@ -33,7 +33,7 @@ pub trait Memory { /// Read a word from memory fn read(&self, offset: U256) -> U256; /// Write slice of bytes to memory. Does not resize memory! - fn write_slice(&mut self, offset: U256, &[u8]); + fn write_slice(&mut self, offset: U256, _: &[u8]); /// Retrieve part of the memory between offset and offset + size fn read_slice(&self, offset: U256, size: U256) -> &[u8]; /// Retrieve writeable part of memory
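For reference, the migrated EIP-712 parser above drives logos 0.12 directly: token variants are declared with #[token] and #[regex] attributes, callbacks such as validate_bytes turn the matched slice into the variant's payload, and Token::lexer(input) is iterated instead of the old lunarity-lexer Lexer::new/advance loop. A compact sketch of that API on a reduced token set (illustrative only, not the full EIP-712 grammar; assumes logos = "0.12" as added to Cargo.toml):

use logos::Logos;

#[derive(Logos, Debug, PartialEq)]
enum Token {
    #[token("uint")]
    Uint,

    // The callback turns the matched slice, e.g. "[3]", into the variant's payload.
    #[regex("\\[[0-9]+\\]", |lex| lex.slice()[1..lex.slice().len() - 1].parse::<u64>().ok())]
    SizedArray(u64),

    #[token("[]")]
    Array,

    #[regex("[a-zA-Z_$][a-zA-Z0-9_$]*")]
    Identifier,

    // logos 0.12 reports unlexable input through a dedicated error variant.
    #[error]
    Error,
}

fn main() {
    let mut lex = Token::lexer("uint[3][]");
    assert_eq!(lex.next(), Some(Token::Uint));
    assert_eq!(lex.next(), Some(Token::SizedArray(3)));
    assert_eq!(lex.next(), Some(Token::Array));
    assert_eq!(lex.next(), None);
}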