refactor(verification): move token info
glevco committed Nov 16, 2023
1 parent 351e4d3 commit 5693c13
Showing 4 changed files with 47 additions and 46 deletions.
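
In short, this commit moves the token-info aggregation helpers from TransactionVerifier onto the Transaction model itself. A minimal sketch of the resulting call-site change, using only names that appear in this diff (verifier and tx stand in for an existing TransactionVerifier and Transaction; the snippet is illustrative, not part of the commit):

    # before: the verifier owned the aggregation helper
    token_dict = verifier.get_complete_token_info(tx)
    # after: the transaction exposes it directly
    token_dict = tx.get_complete_token_info()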
4 changes: 2 additions & 2 deletions hathor/transaction/token_creation_tx.py
@@ -217,8 +217,8 @@ def to_json_extended(self) -> dict[str, Any]:
         return json

     @override
-    def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]:
-        token_dict = super().get_token_info_from_inputs()
+    def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]:
+        token_dict = super()._get_token_info_from_inputs()

         # we add the created token's info to token_dict, as the creation tx allows for mint/melt
         assert self.hash is not None
43 changes: 42 additions & 1 deletion hathor/transaction/transaction.py
@@ -22,6 +22,7 @@
 from hathor.profiler import get_cpu_profiler
 from hathor.transaction import BaseTransaction, Block, TxInput, TxOutput, TxVersion
 from hathor.transaction.base_transaction import TX_HASH_SIZE
+from hathor.transaction.exceptions import InvalidToken
 from hathor.transaction.util import VerboseCallback, unpack, unpack_len
 from hathor.types import TokenUid, VertexId
 from hathor.util import not_none
@@ -278,7 +279,16 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None:
         raise InvalidNewTransaction(f'Invalid new transaction {self.hash_hex}: expected to reach a checkpoint but '
                                     'none of its children is checkpoint-valid')

-    def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]:
+    def get_complete_token_info(self) -> dict[TokenUid, TokenInfo]:
+        """
+        Get a complete token info dict, including data from both inputs and outputs.
+        """
+        token_dict = self._get_token_info_from_inputs()
+        self._update_token_info_from_outputs(token_dict=token_dict)
+
+        return token_dict
+
+    def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]:
         """Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt)
         """
         token_dict: dict[TokenUid, TokenInfo] = {}
@@ -305,6 +315,37 @@ def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]:

         return token_dict

+    def _update_token_info_from_outputs(self, *, token_dict: dict[TokenUid, TokenInfo]) -> None:
+        """Iterate over the outputs and add values to token info dict. Updates the dict in-place.
+        Also, checks if no token has authorities on the outputs not present on the inputs
+        :raises InvalidToken: when there's an error in token operations
+        """
+        # iterate over outputs and add values to token_dict
+        for index, tx_output in enumerate(self.outputs):
+            token_uid = self.get_token_uid(tx_output.get_token_index())
+            token_info = token_dict.get(token_uid)
+            if token_info is None:
+                raise InvalidToken('no inputs for token {}'.format(token_uid.hex()))
+            else:
+                # for authority outputs, make sure the same capability (mint/melt) was present in the inputs
+                if tx_output.can_mint_token() and not token_info.can_mint:
+                    raise InvalidToken('output has mint authority, but no input has it: {}'.format(
+                        tx_output.to_human_readable()))
+                if tx_output.can_melt_token() and not token_info.can_melt:
+                    raise InvalidToken('output has melt authority, but no input has it: {}'.format(
+                        tx_output.to_human_readable()))
+
+                if tx_output.is_token_authority():
+                    # make sure we only have authorities that we know of
+                    if tx_output.value > TxOutput.ALL_AUTHORITIES:
+                        raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value))
+                else:
+                    # for regular outputs, just subtract from the total amount
+                    sum_tokens = token_info.amount + tx_output.value
+                    token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt)
+
     def iter_spent_rewards(self) -> Iterator[Block]:
         """Iterate over all the rewards being spent, assumes tx has been verified."""
         for input_tx in self.inputs:
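
For context, here is a minimal usage sketch of the new Transaction-level API added above. It assumes tx is an already-loaded hathor Transaction; only the method and attribute names come from the diff.

    # hedged sketch: sum token info from the inputs, then apply the outputs in-place
    token_dict = tx.get_complete_token_info()
    for token_uid, token_info in token_dict.items():
        # TokenInfo carries the running amount plus the mint/melt capabilities gathered from the inputs
        print(token_uid.hex(), token_info.amount, token_info.can_mint, token_info.can_melt)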
2 changes: 1 addition & 1 deletion hathor/verification/token_creation_transaction_verifier.py
@@ -40,7 +40,7 @@ def verify_sum(self, tx: Transaction) -> None:
         :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt
         """
         assert isinstance(tx, TokenCreationTransaction)
-        token_dict = self.get_complete_token_info(tx)
+        token_dict = tx.get_complete_token_info()

         # make sure tokens are being minted
         token_info = token_dict[not_none(tx.hash)]
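
Because TokenCreationTransaction overrides _get_token_info_from_inputs to inject an entry for the token being created (first file above), the verifier can now delegate to the transaction and then look that entry up by the transaction's own hash. A sketch of that lookup, using only names from the diff (surrounding checks elided):

    # hedged sketch: the newly created token is keyed by the tx's own hash
    token_dict = tx.get_complete_token_info()
    new_token_info = token_dict[not_none(tx.hash)]
    # the verifier then inspects this entry to make sure tokens are actually being minted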
44 changes: 2 additions & 42 deletions hathor/verification/transaction_verifier.py
@@ -13,7 +13,7 @@
 # limitations under the License.

 from hathor.profiler import get_cpu_profiler
-from hathor.transaction import BaseTransaction, Transaction, TxInput, TxOutput
+from hathor.transaction import BaseTransaction, Transaction, TxInput
 from hathor.transaction.exceptions import (
     ConflictingInputs,
     DuplicatedParents,
@@ -186,7 +186,7 @@ def verify_sum(self, tx: Transaction) -> None:
         :raises InvalidToken: when there's an error in token operations
         :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt
         """
-        token_dict = self.get_complete_token_info(tx)
+        token_dict = tx.get_complete_token_info()
         self.verify_authorities_and_deposit(token_dict)

     def verify_reward_locked(self, tx: Transaction) -> None:
@@ -258,43 +258,3 @@ def verify_authorities_and_deposit(self, token_dict: dict[TokenUid, TokenInfo]) -> None:
                 htr_info.amount,
                 htr_expected_amount,
             ))
-
-    def update_token_info_from_outputs(self, tx: Transaction, *, token_dict: dict[TokenUid, TokenInfo]) -> None:
-        """Iterate over the outputs and add values to token info dict. Updates the dict in-place.
-        Also, checks if no token has authorities on the outputs not present on the inputs
-        :raises InvalidToken: when there's an error in token operations
-        """
-        # iterate over outputs and add values to token_dict
-        for index, tx_output in enumerate(tx.outputs):
-            token_uid = tx.get_token_uid(tx_output.get_token_index())
-            token_info = token_dict.get(token_uid)
-            if token_info is None:
-                raise InvalidToken('no inputs for token {}'.format(token_uid.hex()))
-            else:
-                # for authority outputs, make sure the same capability (mint/melt) was present in the inputs
-                if tx_output.can_mint_token() and not token_info.can_mint:
-                    raise InvalidToken('output has mint authority, but no input has it: {}'.format(
-                        tx_output.to_human_readable()))
-                if tx_output.can_melt_token() and not token_info.can_melt:
-                    raise InvalidToken('output has melt authority, but no input has it: {}'.format(
-                        tx_output.to_human_readable()))
-
-                if tx_output.is_token_authority():
-                    # make sure we only have authorities that we know of
-                    if tx_output.value > TxOutput.ALL_AUTHORITIES:
-                        raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value))
-                else:
-                    # for regular outputs, just subtract from the total amount
-                    sum_tokens = token_info.amount + tx_output.value
-                    token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt)
-
-    def get_complete_token_info(self, tx: Transaction) -> dict[TokenUid, TokenInfo]:
-        """
-        Get a complete token info dict, including data from both inputs and outputs.
-        """
-        token_dict = tx.get_token_info_from_inputs()
-        self.update_token_info_from_outputs(tx, token_dict=token_dict)
-
-        return token_dict
-
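After the move, verify_sum on the generic TransactionVerifier reduces to a two-step composition: the transaction aggregates its own token info, and the verifier keeps only the policy checks. A sketch of that split, using only names from the diff:

    # hedged sketch of the post-refactor flow inside TransactionVerifier.verify_sum
    token_dict = tx.get_complete_token_info()           # aggregation now lives on Transaction
    self.verify_authorities_and_deposit(token_dict)     # policy checks stay on the verifier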