Skip to content

Commit

Permalink
Merge pull request #11 from iamdefinitelyahuman/v1.0.1
Browse files Browse the repository at this point in the history
V1.1.0
  • Loading branch information
iamdefinitelyahuman authored May 30, 2020
2 parents 2ed4d0c + 9623566 commit 7ceefd9
Show file tree
Hide file tree
Showing 5 changed files with 139 additions and 11 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased](https://github.com/iamdefinitelyahuman/eth-event)

## [1.1.0](https://github.com/iamdefinitelyahuman/eth-event/releases/tag/v1.1.0) - 2020-05-30
### Added
- Decode events when the ABI contains indexed values but the log has None

### Changed
- More expressive error messages when an event cannot be decoded from an incorrect number of topics

## [1.0.0](https://github.com/iamdefinitelyahuman/eth-event/releases/tag/v1.0.0) - 2020-04-21
### Added
- Anonymous and undecodable events can be returned without raising with the `allow_undecoded` kwarg
Expand Down
46 changes: 37 additions & 9 deletions eth_event/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from typing import Dict, List

from eth_abi import decode_abi, decode_single
from eth_abi.exceptions import InsufficientDataBytes
from eth_abi.exceptions import InsufficientDataBytes, NonEmptyPaddingBytes
from eth_hash.auto import keccak
from hexbytes import HexBytes

Expand Down Expand Up @@ -256,39 +256,67 @@ def _params(abi_params: List) -> List:


def _decode(inputs: List, topics: List, data: str) -> List:
indexed_count = len([i for i in inputs if i["indexed"]])

if indexed_count and not topics:
# special case - if the ABI has indexed values but the log does not,
# we should still be able to decode the data
unindexed_types = inputs

else:
if indexed_count < len(topics):
raise EventError(
"Event log does not contain enough topics for the given ABI - this"
" is usually because an event argument is not marked as indexed"
)
if indexed_count > len(topics):
raise EventError(
"Event log contains more topics than expected for the given ABI - this is"
" usually because an event argument is incorrectly marked as indexed"
)
unindexed_types = [i for i in inputs if not i["indexed"]]

# decode the unindexed event data
try:
types = _params([i for i in inputs if not i["indexed"]])
unindexed_types = _params(unindexed_types)
except (KeyError, TypeError):
raise ABIError("Invalid ABI")

if types and data == "0x":
data += "0" * (len(types) * 64)
if unindexed_types and data == "0x":
length = len(unindexed_types) * 32
data = f"0x{bytes(length).hex()}"

try:
decoded = list(decode_abi(types, HexBytes(data)))[::-1]
decoded = list(decode_abi(unindexed_types, HexBytes(data)))[::-1]
except InsufficientDataBytes:
raise EventError("Insufficient event data")
raise EventError("Event data has insufficient length")
except NonEmptyPaddingBytes:
raise EventError("Malformed data field in event log")
except OverflowError:
raise EventError("Cannot decode event due to overflow error")

# decode the indexed event data and create the returned dict
topics = topics[::-1]
result = []
for i in inputs:
result.append({"name": i["name"], "type": i["type"]})

if "components" in i:
result[-1]["components"] = i["components"]
if i["indexed"]:
if not topics:
raise EventError("Insufficient event data")

if topics and i["indexed"]:
encoded = HexBytes(topics.pop())
try:
value = decode_single(i["type"], encoded)
except (InsufficientDataBytes, OverflowError):
# an array or other data type that uses multiple slots
result[-1].update({"value": encoded.hex(), "decoded": False})
continue
else:
value = decoded.pop()

if isinstance(value, bytes):
# converting to `HexBytes` first ensures the leading `0x`
value = HexBytes(value).hex()
result[-1].update({"value": value, "decoded": True})

Expand Down
3 changes: 2 additions & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 1.0.0
current_version = 1.1.0

[bumpversion:file:setup.py]

Expand All @@ -21,3 +21,4 @@ use_parentheses = True

[tool:pytest]
addopts = --cov=eth_event/ --cov-report term --cov-report xml

2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

setup(
name="eth-event",
version="1.0.0", # do not edit directly, use bumpversion
version="1.1.0", # do not edit directly, use bumpversion
license="MIT",
description="Ethereum event decoder and topic generator",
long_description=long_description,
Expand Down
92 changes: 92 additions & 0 deletions tests/test_incorrect_topics.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
import pytest
from hexbytes import HexBytes

from eth_event import EventError, decode_log, get_topic_map

abi_no_indexes = {
"anonymous": False,
"inputs": [
{"indexed": False, "name": "sender", "type": "address"},
{"indexed": False, "name": "receiver", "type": "address"},
{"indexed": False, "name": "value", "type": "uint256"},
],
"name": "Transfer",
"type": "event",
}

abi_two_indexes = {
"anonymous": False,
"inputs": [
{"indexed": True, "name": "sender", "type": "address"},
{"indexed": True, "name": "receiver", "type": "address"},
{"indexed": False, "name": "value", "type": "uint256"},
],
"name": "Transfer",
"type": "event",
}

log_no_indexes = {
"logIndex": 0,
"transactionIndex": 0,
"transactionHash": HexBytes(
"0xe1997c46647f74afdffb1473930f90b0ebf6902a75dda0d0c3cbd6d1b57a91e1"
),
"blockHash": HexBytes("0xf571bead3dcf424322ddd02fa7fc0496e06386bef4029de55e2255dee4945cdf"),
"blockNumber": 4,
"address": "0xE7eD6747FaC5360f88a2EFC03E00d25789F69291",
"data": "0x00000000000000000000000066ab6d9362d4f35596279692f0251db63516587100000000000000000000000066ab6d9362d4f35596279692f0251db6351658710000000000000000000000000000000000000000000000000000000000002710", # noqa: E501
"topics": [HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef")],
"type": "mined",
}

log_one_index = {
"logIndex": 0,
"transactionIndex": 0,
"transactionHash": HexBytes(
"0xe6eba721271cedb7a492c513d5dfedab73cc54f1a5a337fb027d23f523cada49"
),
"blockHash": HexBytes("0x764e1736dc9cba192b2ebf2911abcad9168d4db35850a678600637f5a6de27ed"),
"blockNumber": 4,
"address": "0xE7eD6747FaC5360f88a2EFC03E00d25789F69291",
"data": "0x00000000000000000000000066ab6d9362d4f35596279692f0251db6351658710000000000000000000000000000000000000000000000000000000000002710", # noqa: E501
"topics": [
HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
HexBytes("0x00000000000000000000000033a4622b82d4c04a53e170c638b944ce27cffce3"),
],
"type": "mined",
}

log_two_indexes = {
"logIndex": 0,
"transactionIndex": 0,
"transactionHash": HexBytes(
"0xe6eba721271cedb7a492c513d5dfedab73cc54f1a5a337fb027d23f523cada49"
),
"blockHash": HexBytes("0x764e1736dc9cba192b2ebf2911abcad9168d4db35850a678600637f5a6de27ed"),
"blockNumber": 4,
"address": "0xE7eD6747FaC5360f88a2EFC03E00d25789F69291",
"data": "0x0000000000000000000000000000000000000000000000000000000000002710",
"topics": [
HexBytes("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"),
HexBytes("0x00000000000000000000000066ab6d9362d4f35596279692f0251db635165871"),
HexBytes("0x00000000000000000000000033a4622b82d4c04a53e170c638b944ce27cffce3"),
],
"type": "mined",
}


def test_no_topics_in_log():
topic_map = get_topic_map([abi_two_indexes])
decode_log(log_no_indexes, topic_map)


def test_too_many_log_topics():
topic_map = get_topic_map([abi_no_indexes])
with pytest.raises(EventError, match="does not contain enough topics"):
decode_log(log_two_indexes, topic_map)


def test_insufficient_log_topics():
topic_map = get_topic_map([abi_two_indexes])
with pytest.raises(EventError, match="more topics than expected"):
decode_log(log_one_index, topic_map)

0 comments on commit 7ceefd9

Please sign in to comment.