Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(reliable-integration): add decoded outputs in event responses #763

Merged
merged 1 commit into from
Sep 11, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 30 additions & 7 deletions hathor/event/model/event_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,16 +20,23 @@
from hathor.utils.pydantic import BaseModel


class TxInput(BaseModel):
tx_id: str
index: int
data: str
class DecodedTxOutput(BaseModel, extra=Extra.ignore):
    # Decoded form of a transaction output script, as nested under
    # TxOutput.decoded in event responses. Extra keys in the incoming
    # JSON are ignored (extra=Extra.ignore) so upstream serializer
    # additions do not break validation.
    #
    # Script type identifier emitted by the serializer — exact value set
    # is not visible from here; confirm against the tx serializer.
    type: str
    # Destination address string — presumably base58; verify against serializer.
    address: str
    # Timelock timestamp, or None when the output carries no timelock.
    timelock: Optional[int]


class TxOutput(BaseModel, extra=Extra.ignore):
    # A transaction output as serialized in event responses.
    #
    # Fix: the diff scrape left both the removed and the added `script: str`
    # lines in place, duplicating the field and scrambling field order; this
    # is the reconstructed post-change definition. Extra keys in the incoming
    # JSON are ignored (extra=Extra.ignore).
    #
    # Output amount in the token's base unit — units not visible here; confirm.
    value: int
    # Token index/authority byte as serialized — semantics come from the
    # serializer; can't tell the full encoding from this view.
    token_data: int
    # Raw output script — presumably hex or base64 encoded; verify upstream.
    script: str
    # Decoded script fields; None when the script could not be decoded.
    decoded: Optional[DecodedTxOutput]


class TxInput(BaseModel):
    # A transaction input, carrying the full output it spends so event
    # consumers do not need a second lookup to resolve the spent output.
    #
    # Id of the transaction whose output is being spent — presumably hex; confirm.
    tx_id: str
    # Index of the spent output within that transaction's outputs list.
    index: int
    # The complete spent output (populated from the same serialized input
    # dict by TxData.from_event_arguments; extra keys are ignored there).
    spent_output: TxOutput


class SpentOutput(BaseModel):
Expand Down Expand Up @@ -108,7 +115,23 @@ class TxData(BaseEventData, extra=Extra.ignore):

@classmethod
def from_event_arguments(cls, args: EventArguments) -> 'TxData':
    """Build a TxData from event arguments.

    Serializes args.tx via get_tx_extra_data (token detailing disabled,
    since event responses only need raw token uids), merges the metadata
    into the tx dict, normalizes outputs so an empty `decoded` dict
    becomes None (matching Optional[DecodedTxOutput]), and reshapes each
    input into the {tx_id, index, spent_output} form expected by TxInput.

    Fix: the diff scrape retained the removed pre-change line
    `tx_json = args.tx.to_json(include_metadata=True)` alongside its
    replacement, leaving a dead, conflicting assignment; this is the
    reconstructed post-change body.
    """
    # Local import — presumably to avoid a circular import between the
    # event model and the transaction resources module; confirm.
    from hathor.transaction.resources.transaction import get_tx_extra_data
    tx_extra_data_json = get_tx_extra_data(args.tx, detail_tokens=False)
    tx_json = tx_extra_data_json['tx']
    meta_json = tx_extra_data_json['meta']
    tx_json['metadata'] = meta_json
    # `decoded` may come back as an empty dict for undecodable scripts;
    # coerce falsy values to None so pydantic sees Optional[DecodedTxOutput].
    tx_json['outputs'] = [
        output | dict(decoded=output['decoded'] or None)
        for output in tx_json['outputs']
    ]
    # Each serialized input already describes the spent output, so reuse
    # the whole input dict as `spent_output` (TxOutput ignores extras).
    tx_json['inputs'] = [
        dict(
            tx_id=input_['tx_id'],
            index=input_['index'],
            spent_output=input_
        )
        for input_ in tx_json['inputs']
    ]

    return cls(**tx_json)

Expand Down
27 changes: 14 additions & 13 deletions hathor/transaction/resources/transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def update_serialized_tokens_array(tx: BaseTransaction, serialized: dict[str, An
serialized['tokens'] = [h.hex() for h in tx.tokens]


def get_tx_extra_data(tx: BaseTransaction) -> dict[str, Any]:
def get_tx_extra_data(tx: BaseTransaction, *, detail_tokens: bool = True) -> dict[str, Any]:
""" Get the data of a tx to be returned to the frontend
Returns success, tx serializes, metadata and spent outputs
"""
Expand Down Expand Up @@ -116,18 +116,19 @@ def get_tx_extra_data(tx: BaseTransaction) -> dict[str, Any]:

serialized['inputs'] = inputs

detailed_tokens = []
for token_uid_hex in serialized['tokens']:
tokens_index = tx.storage.indexes.tokens
assert tokens_index is not None
token_info = tokens_index.get_token_info(bytes.fromhex(token_uid_hex))
detailed_tokens.append({
'uid': token_uid_hex,
'name': token_info.get_name(),
'symbol': token_info.get_symbol(),
})

serialized['tokens'] = detailed_tokens
if detail_tokens:
detailed_tokens = []
for token_uid_hex in serialized['tokens']:
tokens_index = tx.storage.indexes.tokens
assert tokens_index is not None
token_info = tokens_index.get_token_info(bytes.fromhex(token_uid_hex))
detailed_tokens.append({
'uid': token_uid_hex,
'name': token_info.get_name(),
'symbol': token_info.get_symbol(),
})

serialized['tokens'] = detailed_tokens

return {
'success': True,
Expand Down
4 changes: 2 additions & 2 deletions tests/event/test_event_reorg.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from hathor.event.model.event_type import EventType
from hathor.event.storage import EventMemoryStorage
from tests import unittest
from tests.utils import add_new_blocks, get_genesis_key
from tests.utils import BURN_ADDRESS, add_new_blocks, get_genesis_key

settings = HathorSettings()

Expand Down Expand Up @@ -37,7 +37,7 @@ def test_reorg_events(self):
self.log.debug('make reorg block')
block_to_replace = blocks[8]
tb0 = self.manager.make_custom_block_template(block_to_replace.parents[0], block_to_replace.parents[1:])
b0 = tb0.generate_mining_block(self.manager.rng, storage=self.manager.tx_storage)
b0 = tb0.generate_mining_block(self.manager.rng, storage=self.manager.tx_storage, address=BURN_ADDRESS)
b0.weight = 10
b0.resolve()
b0.verify()
Expand Down
Loading