fix(sol-types): empty data decode #159

Merged
merged 1 commit on Jun 27, 2023

2 changes: 1 addition & 1 deletion crates/json-abi/Cargo.toml
@@ -14,8 +14,8 @@ repository.workspace = true
 exclude.workspace = true

 [dependencies]
-serde = { workspace = true, features = ["derive"] }
 alloy-primitives.workspace = true
+serde = { workspace = true, features = ["derive"] }

 [dev-dependencies]
 serde_json.workspace = true

2 changes: 1 addition & 1 deletion crates/rlp/Cargo.toml
@@ -27,6 +27,6 @@ alloy-rlp-derive = { workspace = true, optional = true }
 hex-literal.workspace = true

 [features]
-default = ["std", "derive"]
+default = ["std"]
 std = ["arrayvec/std", "bytes/std"]
 derive = ["dep:alloy-rlp-derive"]

6 changes: 1 addition & 5 deletions crates/sol-macro/src/expand/error.rs
@@ -26,11 +26,7 @@ pub(super) fn expand(cx: &ExpCtxt<'_>, error: &ItemError) -> Result<TokenStream>
     } = error;
     cx.assert_resolved(params)?;

-    let tokenize_impl: TokenStream = if params.is_empty() {
-        quote! { ::core::convert::From::from([]) }
-    } else {
-        expand_tokenize_func(params.iter())
-    };
+    let tokenize_impl = expand_tokenize_func(params.iter());

     let signature = cx.signature(name.as_string(), params);
     let selector = crate::utils::selector(&signature);

7 changes: 1 addition & 6 deletions crates/sol-macro/src/expand/event.rs
@@ -90,12 +90,7 @@ pub(super) fn expand(cx: &ExpCtxt<'_>, event: &ItemEvent) -> Result<TokenStream>
         .enumerate()
         .map(|(i, p)| expand_event_topic_field(i, p, p.name.as_ref()));

-    let tokenize_body_impl = if event.parameters.iter().all(|p| p.is_indexed()) {
-        // special case for empty tuple
-        quote! {::core::convert::From::from([])}
-    } else {
-        expand_event_tokenize_func(event.parameters.iter())
-    };
+    let tokenize_body_impl = expand_event_tokenize_func(event.parameters.iter());

     let encode_topics_impl = encode_first_topic
         .into_iter()

7 changes: 1 addition & 6 deletions crates/sol-macro/src/expand/function.rs
@@ -108,11 +108,7 @@ fn expand_call(cx: &ExpCtxt<'_>, function: &ItemFunction) -> Result<TokenStream>
     let signature = cx.signature(function.name().as_string(), &function.arguments);
     let selector = crate::utils::selector(&signature);

-    let tokenize_impl = if function.arguments.is_empty() {
-        quote! { ::core::convert::From::from([]) }
-    } else {
-        expand_tokenize_func(function.arguments.iter())
-    };
+    let tokenize_impl = expand_tokenize_func(function.arguments.iter());

     let tokens = quote! {
         #struct_def
@@ -147,7 +143,6 @@ fn expand_call(cx: &ExpCtxt<'_>, function: &ItemFunction) -> Result<TokenStream>
             fn decode_returns(data: &[u8], validate: bool) -> ::alloy_sol_types::Result<Self::Return> {
                 <Self::ReturnTuple<'_> as ::alloy_sol_types::SolType>::decode(data, validate).map(Into::into)
             }
-
         }
     };
 };

13 changes: 6 additions & 7 deletions crates/sol-macro/src/expand/mod.rs
@@ -359,21 +359,20 @@ fn expand_from_into_unit(name: &Ident) -> TokenStream {
         type UnderlyingRustTuple<'a> = ();

         impl From<()> for #name {
-            fn from(_: ()) -> Self {
+            #[inline]
+            fn from((): ()) -> Self {
                 Self {}
             }
         }

         impl From<#name> for () {
-            fn from(_: #name) -> Self {
-                ()
-            }
+            #[inline]
+            fn from(#name {}: #name) {}
         }

         impl ::alloy_sol_types::Encodable<()> for #name {
-            fn to_tokens(&self) -> <() as ::alloy_sol_types::SolType>::TokenType<'_> {
-                ::alloy_sol_types::token::FixedSeqToken::from([])
-            }
+            #[inline]
+            fn to_tokens(&self) {}
         }
     }
 }

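To make the quoted template above concrete, this is roughly what `expand_from_into_unit` now emits for a zero-parameter type. The name `fooCall` is invented for illustration; this is a sketch, not the macro's verbatim output.

```rust
// Illustrative expansion only; `fooCall` stands in for any zero-field
// struct the macro generates for a parameterless call, error, or event.
#[allow(non_camel_case_types)]
struct fooCall {}

impl From<()> for fooCall {
    #[inline]
    fn from((): ()) -> Self {
        Self {}
    }
}

impl From<fooCall> for () {
    #[inline]
    fn from(fooCall {}: fooCall) {}
}

fn main() {
    // Round-trip through the unit tuple.
    let call: fooCall = ().into();
    let () = call.into();
}
```
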
6 changes: 1 addition & 5 deletions crates/sol-macro/src/expand/struct.rs
@@ -57,11 +57,7 @@ pub(super) fn expand(_cx: &ExpCtxt<'_>, s: &ItemStruct) -> Result<TokenStream> {
         quote!(#encoded_type)
     };

-    let tokenize_impl: TokenStream = if fields.is_empty() {
-        quote! { ::core::convert::From::from([]) }
-    } else {
-        expand_tokenize_func(fields.iter())
-    };
+    let tokenize_impl = expand_tokenize_func(fields.iter());

     let encode_data_impl = match fields.len() {
         0 => unreachable!(),

12 changes: 3 additions & 9 deletions crates/sol-types/src/coder/decoder.rs
@@ -197,19 +197,13 @@ impl<'de> Decoder<'de> {

     /// Decodes a single token from the underlying buffer.
     #[inline]
-    pub fn decode<T: TokenType<'de>>(&mut self, data: &[u8]) -> Result<T> {
-        if data.is_empty() {
-            return Err(Error::Overrun)
-        }
+    pub fn decode<T: TokenType<'de>>(&mut self) -> Result<T> {
         T::decode_from(self)
     }

     /// Decodes a sequence of tokens from the underlying buffer.
     #[inline]
-    pub fn decode_sequence<T: TokenType<'de> + TokenSeq<'de>>(&mut self, data: &[u8]) -> Result<T> {
-        if data.is_empty() {
-            return Err(Error::Overrun)
-        }
+    pub fn decode_sequence<T: TokenType<'de> + TokenSeq<'de>>(&mut self) -> Result<T> {
         T::decode_sequence(self)
     }
 }
@@ -218,7 +212,7 @@ impl<'de> Decoder<'de> {
 /// types param.
 pub fn decode<'de, T: TokenSeq<'de>>(data: &'de [u8], validate: bool) -> Result<T> {
     let mut decoder = Decoder::new(data, validate);
-    let res = decoder.decode_sequence::<T>(data)?;
+    let res = decoder.decode_sequence::<T>()?;
     if validate && encode(&res) != data {
         return Err(Error::ReserMismatch)
     }

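This decoder change is the heart of the fix: `Decoder::decode` and `Decoder::decode_sequence` drop the redundant `data` argument and the `data.is_empty()` guard, so an empty buffer no longer short-circuits into `Error::Overrun`. A minimal sketch of the resulting behavior (not part of the diff), assuming the unit `SolType` impl from the data_type.rs change below:

```rust
// Sketch only: decoding the empty tuple from zero bytes should now succeed
// instead of returning `Error::Overrun`.
use alloy_sol_types::SolType;

fn decode_empty(data: &[u8]) -> alloy_sol_types::Result<()> {
    // `()` has `ENCODED_SIZE == Some(0)`, so `&[]` is a valid encoding of it.
    <() as SolType>::decode(data, true)
}
```

Before this change, the early `return Err(Error::Overrun)` made this fail even though the empty tuple occupies zero bytes.
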
9 changes: 4 additions & 5 deletions crates/sol-types/src/types/data_type.rs
@@ -628,14 +628,14 @@ macro_rules! tuple_impls {

 impl Encodable<()> for () {
     #[inline]
-    fn to_tokens(&self) -> <() as SolType>::TokenType<'_> {
-        FixedSeqToken([])
-    }
+    fn to_tokens(&self) {}
 }

+all_the_tuples!(@double tuple_encodable_impls);
+
 impl SolType for () {
     type RustType = ();
-    type TokenType<'a> = FixedSeqToken<(), 0>;
+    type TokenType<'a> = ();

     const ENCODED_SIZE: Option<usize> = Some(0);

@@ -661,7 +661,6 @@ impl SolType for () {
     fn encode_packed_to(_rust: &Self::RustType, _out: &mut Vec<u8>) {}
 }

-all_the_tuples!(@double tuple_encodable_impls);
 all_the_tuples!(tuple_impls);

 mod sealed {

11 changes: 11 additions & 0 deletions crates/sol-types/tests/sol.rs
@@ -145,3 +145,14 @@ fn error() {
     let e = SomeError { a: U256::from(1) };
     assert_eq!(e.encoded_size(), 32);
 }
+
+sol! {
+    interface WETH {
+        function deposit() external payable;
+    }
+}
+
+#[test]
+fn empty_call() {
+    WETH::depositCall::decode(&WETH::depositCall::SELECTOR, true).expect("it should work");
+}
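
A hedged companion to the new test (not in this PR): round-trip the zero-argument call, assuming `SolCall::encode` prepends the 4-byte selector that `decode` consumes above.

```rust
#[test]
fn empty_call_roundtrip() {
    // Hypothetical follow-up test: for `deposit()` the encoding should be
    // exactly the selector, and decoding it back should yield the empty call.
    let encoded = WETH::depositCall {}.encode();
    assert_eq!(encoded, WETH::depositCall::SELECTOR);
    WETH::depositCall::decode(&encoded, true).expect("round-trip decode");
}
```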