diff --git a/hathor/transaction/token_creation_tx.py b/hathor/transaction/token_creation_tx.py
index a0d947787..741601aaa 100644
--- a/hathor/transaction/token_creation_tx.py
+++ b/hathor/transaction/token_creation_tx.py
@@ -15,10 +15,13 @@
 from struct import error as StructError, pack
 from typing import Any, Optional
 
+from typing_extensions import override
+
 from hathor.transaction.base_transaction import TxInput, TxOutput, TxVersion
 from hathor.transaction.storage import TransactionStorage  # noqa: F401
-from hathor.transaction.transaction import Transaction
+from hathor.transaction.transaction import TokenInfo, Transaction
 from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len
+from hathor.types import TokenUid
 
 # Signal bits (B), version (B), inputs len (B), outputs len (B)
 _FUNDS_FORMAT_STRING = '!BBBB'
@@ -213,6 +216,16 @@ def to_json_extended(self) -> dict[str, Any]:
         json['tokens'] = []
         return json
 
+    @override
+    def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]:
+        token_dict = super().get_token_info_from_inputs()
+
+        # we add the created token's info to token_dict, as the creation tx allows for mint/melt
+        assert self.hash is not None
+        token_dict[self.hash] = TokenInfo(0, True, True)
+
+        return token_dict
+
 
 def decode_string_utf8(encoded: bytes, key: str) -> str:
     """ Raises StructError in case it's not a valid utf-8 string
diff --git a/hathor/verification/token_creation_transaction_verifier.py b/hathor/verification/token_creation_transaction_verifier.py
index cdb41ace7..e3e7dcec2 100644
--- a/hathor/verification/token_creation_transaction_verifier.py
+++ b/hathor/verification/token_creation_transaction_verifier.py
@@ -12,10 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from hathor.transaction import Transaction
 from hathor.transaction.exceptions import InvalidToken, TransactionDataError
 from hathor.transaction.token_creation_tx import TokenCreationTransaction
-from hathor.transaction.transaction import TokenInfo, Transaction
 from hathor.transaction.util import clean_token_string
+from hathor.util import not_none
 from hathor.verification.transaction_verifier import TransactionVerifier
 
 
@@ -39,20 +40,14 @@ def verify_sum(self, tx: Transaction) -> None:
         :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt
         """
         assert isinstance(tx, TokenCreationTransaction)
-        token_dict = tx.get_token_info_from_inputs()
-
-        # we add the created token's info to token_dict, as the creation tx allows for mint/melt
-        assert tx.hash is not None
-        token_dict[tx.hash] = TokenInfo(0, True, True)
-
-        self.update_token_info_from_outputs(tx, token_dict=token_dict)
+        token_dict = self.get_complete_token_info(tx)
 
         # make sure tokens are being minted
-        token_info = token_dict[tx.hash]
+        token_info = token_dict[not_none(tx.hash)]
         if token_info.amount <= 0:
             raise InvalidToken('Token creation transaction must mint new tokens')
 
-        self.verify_authorities_and_deposit(token_dict)
+        super().verify_sum(tx)
 
     def verify_token_info(self, tx: TokenCreationTransaction) -> None:
         """ Validates token info
diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py
index b24480546..2214d4007 100644
--- a/hathor/verification/transaction_verifier.py
+++ b/hathor/verification/transaction_verifier.py
@@ -186,8 +186,7 @@ def verify_sum(self, tx: Transaction) -> None:
         :raises InvalidToken: when there's an error in token operations
         :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt
         """
-        token_dict = tx.get_token_info_from_inputs()
-        self.update_token_info_from_outputs(tx, token_dict=token_dict)
+        token_dict = self.get_complete_token_info(tx)
         self.verify_authorities_and_deposit(token_dict)
 
     def verify_reward_locked(self, tx: Transaction) -> None:
@@ -290,3 +289,12 @@ def update_token_info_from_outputs(self, tx: Transaction, *, token_dict: dict[To
             # for regular outputs, just subtract from the total amount
             sum_tokens = token_info.amount + tx_output.value
             token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt)
+
+    def get_complete_token_info(self, tx: Transaction) -> dict[TokenUid, TokenInfo]:
+        """
+        Get a complete token info dict, including data from both inputs and outputs.
+        """
+        token_dict = tx.get_token_info_from_inputs()
+        self.update_token_info_from_outputs(tx, token_dict=token_dict)
+
+        return token_dict