From c8ce00b00e406185bb8dd45fb9d650907d56e3fb Mon Sep 17 00:00:00 2001
From: Duc Tho Tran
Date: Thu, 24 Sep 2020 18:23:00 +0700
Subject: [PATCH 1/4] Update implementation of Tokenizable for Vec<u8>

---
 src/contract/tokens.rs   | 23 +++++++++++++++++++++--
 src/types/bytes_array.rs |  5 +++++
 src/types/mod.rs         |  2 ++
 3 files changed, 28 insertions(+), 2 deletions(-)
 create mode 100644 src/types/bytes_array.rs

diff --git a/src/contract/tokens.rs b/src/contract/tokens.rs
index 7b238e2c..f7b876d7 100644
--- a/src/contract/tokens.rs
+++ b/src/contract/tokens.rs
@@ -1,7 +1,7 @@
 //! Contract Functions Output types.
 
 use crate::contract::error::Error;
-use crate::types::{Address, Bytes, H256, U128, U256};
+use crate::types::{Address, Bytes, BytesArray, H256, U128, U256};
 use arrayvec::ArrayVec;
 use ethabi::Token;
 
@@ -291,10 +291,29 @@ macro_rules! tokenizable_item {
 }
 
 tokenizable_item! {
-    Token, String, Address, H256, U256, U128, bool, Vec<u8>,
+    Token, String, Address, H256, U256, U128, bool, BytesArray, Vec<u8>,
     i8, i16, i32, i64, i128, u16, u32, u64, u128,
 }
 
+impl Tokenizable for BytesArray {
+    fn from_token(token: Token) -> Result<Self, Error> {
+        match token {
+            Token::FixedArray(tokens) | Token::Array(tokens) => {
+                let bytes = tokens
+                    .into_iter()
+                    .map(Tokenizable::from_token)
+                    .collect::<Result<Vec<u8>, Error>>()?;
+                Ok(Self(bytes))
+            }
+            other => Err(Error::InvalidOutputType(format!("Expected `Array`, got {:?}", other))),
+        }
+    }
+
+    fn into_token(self) -> Token {
+        Token::Array(self.0.into_iter().map(Tokenizable::into_token).collect())
+    }
+}
+
 impl<T: TokenizableItem> Tokenizable for Vec<T> {
     fn from_token(token: Token) -> Result<Self, Error> {
         match token {
diff --git a/src/types/bytes_array.rs b/src/types/bytes_array.rs
new file mode 100644
index 00000000..d3375fa1
--- /dev/null
+++ b/src/types/bytes_array.rs
@@ -0,0 +1,5 @@
+use serde::{Deserialize, Serialize};
+
+/// Array of bytes wrapper
+#[derive(Clone, Debug, Default, Deserialize, PartialEq, Eq, Hash, Serialize)]
+pub struct BytesArray(pub Vec<u8>);
diff --git a/src/types/mod.rs b/src/types/mod.rs
index 069d989c..25c6d1a8 100644
--- a/src/types/mod.rs
+++ b/src/types/mod.rs
@@ -2,6 +2,7 @@
 
 mod block;
 mod bytes;
+mod bytes_array;
 mod log;
 mod parity_peers;
 mod recovery;
@@ -17,6 +18,7 @@ mod work;
 
 pub use self::block::{Block, BlockHeader, BlockId, BlockNumber};
 pub use self::bytes::Bytes;
+pub use self::bytes_array::BytesArray;
 pub use self::log::{Filter, FilterBuilder, Log};
 pub use self::parity_peers::{
     EthProtocolInfo, ParityPeerInfo, ParityPeerType, PeerNetworkInfo, PeerProtocolsInfo, PipProtocolInfo,

From dfbe3e51f2f41b1e8ba6fbf8cebe386b2d044631 Mon Sep 17 00:00:00 2001
From: Duc Tho Tran
Date: Thu, 24 Sep 2020 18:42:27 +0700
Subject: [PATCH 2/4] Add tests

---
 src/contract/tokens.rs | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/src/contract/tokens.rs b/src/contract/tokens.rs
index f7b876d7..9058f18a 100644
--- a/src/contract/tokens.rs
+++ b/src/contract/tokens.rs
@@ -439,8 +439,8 @@ impl_fixed_types!(1024);
 #[cfg(test)]
 mod tests {
     use super::{Detokenize, Tokenizable};
-    use crate::types::{Address, U256};
-    use ethabi::Token;
+    use crate::types::{Address, BytesArray, U256};
+    use ethabi::{Token, Uint};
 
     fn output<R: Detokenize>() -> R {
         unimplemented!()
@@ -455,6 +455,7 @@
         let _string: String = output();
         let _bool: bool = output();
         let _bytes: Vec<u8> = output();
+        let _bytes_array: BytesArray = output();
         let _pair: (U256, bool) = output();
         let _vec: Vec<U256> = output();
 
@@ -487,6 +488,14 @@
         assert_eq!(data[7][0], 8);
     }
 
+    #[test]
+    fn should_decode_array_of_bytes() {
+        let token = Token::Array(vec![Token::Uint(Uint::from(0)), Token::Uint(Uint::from(1))]);
+        let data: BytesArray = Tokenizable::from_token(token).unwrap();
+        assert_eq!(data.0[0], 0);
+        assert_eq!(data.0[1], 1);
+    }
+
     #[test]
     fn should_sign_extend_negative_integers() {
         assert_eq!((-1i8).into_token(), Token::Int(U256::MAX));

From 6710c9f731fe51236966ee797cee187df84d724e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?=
Date: Thu, 24 Sep 2020 15:30:56 +0200
Subject: [PATCH 3/4] Update src/types/bytes_array.rs

---
 src/types/bytes_array.rs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/types/bytes_array.rs b/src/types/bytes_array.rs
index d3375fa1..2fcfe6fc 100644
--- a/src/types/bytes_array.rs
+++ b/src/types/bytes_array.rs
@@ -1,5 +1,7 @@
 use serde::{Deserialize, Serialize};
 
-/// Array of bytes wrapper
+/// A wrapper type for array of bytes.
+/// 
+/// Implements `Tokenizable` so can be used to retrieve data from `Solidity` contracts returning `byte8[]`.
 #[derive(Clone, Debug, Default, Deserialize, PartialEq, Eq, Hash, Serialize)]
 pub struct BytesArray(pub Vec<u8>);

From c2ea3d968f7385d7015bc2ebd23aa9fa2868b75e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?=
Date: Thu, 24 Sep 2020 17:03:29 +0200
Subject: [PATCH 4/4] cargo fmt --all

---
 src/types/bytes_array.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/types/bytes_array.rs b/src/types/bytes_array.rs
index 2fcfe6fc..e8c3f27f 100644
--- a/src/types/bytes_array.rs
+++ b/src/types/bytes_array.rs
@@ -1,7 +1,7 @@
 use serde::{Deserialize, Serialize};
 
 /// A wrapper type for array of bytes.
-/// 
+///
 /// Implements `Tokenizable` so can be used to retrieve data from `Solidity` contracts returning `byte8[]`.
 #[derive(Clone, Debug, Default, Deserialize, PartialEq, Eq, Hash, Serialize)]
 pub struct BytesArray(pub Vec<u8>);