feat(consensus): Change meta.score and meta.accumulated_weight to int type

Co-authored-by: Jan Segre <jan@hathor.network>
msbrogli and jansegre committed Aug 22, 2024
1 parent da32f80 commit da4ac0d
Showing 28 changed files with 353 additions and 248 deletions.
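This commit replaces the floating-point, log-scale weights stored in vertex metadata (score and accumulated_weight) with exact integer "work" values, accumulated by plain addition instead of sum_weights. The conversion lives in the new hathor.utils.weight helpers imported throughout this diff. The snippet below is only a sketch of what weight_to_work plausibly computes, assuming work = round(2**weight); that assumption matches the API examples later in the diff, where a weight of 10 becomes '1024' and 12 becomes '4096'.

def weight_to_work(weight: float) -> int:
    # Sketch only (assumed behaviour, not copied from the repository):
    # work is the expected number of hash attempts for a given weight,
    # rounded so scores can be stored and compared as exact integers.
    return round(2 ** weight)

assert weight_to_work(10) == 1024
assert weight_to_work(12) == 4096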
45 changes: 24 additions & 21 deletions hathor/consensus/block_consensus.py
@@ -18,8 +18,9 @@
from structlog import get_logger

from hathor.conf.get_settings import get_global_settings
from hathor.transaction import BaseTransaction, Block, Transaction, sum_weights
from hathor.transaction import BaseTransaction, Block, Transaction
from hathor.util import classproperty
from hathor.utils.weight import weight_to_work

if TYPE_CHECKING:
from hathor.consensus.context import ConsensusAlgorithmContext
@@ -117,7 +118,7 @@ def update_voided_info(self, block: Block) -> None:
for h in voided_by:
tx = storage.get_transaction(h)
tx_meta = tx.get_metadata()
tx_meta.accumulated_weight = sum_weights(tx_meta.accumulated_weight, block.weight)
tx_meta.accumulated_weight += weight_to_work(block.weight)
self.context.save(tx)

# Check conflicts of the transactions voiding us.
@@ -156,27 +157,27 @@ def update_voided_info(self, block: Block) -> None:
is_connected_to_the_best_chain=is_connected_to_the_best_chain)

# First, void this block.
# We need to void this block first, because otherwise it would always be one of the heads.
self.mark_as_voided(block, skip_remove_first_block_markers=True)

# Get the score of the best chains.
# We need to void this block first, because otherwise it would always be one of the heads.
heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()]
best_score = None
best_score: int | None = None
for head in heads:
head_meta = head.get_metadata(force_reload=True)
if best_score is None:
best_score = head_meta.score
else:
# All heads must have the same score.
assert abs(best_score - head_meta.score) < 1e-10
assert isinstance(best_score, (int, float))
assert best_score == head_meta.score
assert best_score is not None

# Calculate the score.
# We cannot calculate score before getting the heads.
score = self.calculate_score(block)

# Finally, check who the winner is.
if score <= best_score - self._settings.WEIGHT_TOL:
if score < best_score:
# Just update voided_by from parents.
self.update_voided_by_from_parents(block)

@@ -197,7 +198,7 @@ def update_voided_info(self, block: Block) -> None:
common_block = self._find_first_parent_in_best_chain(block)
self.add_voided_by_to_multiple_chains(block, heads, common_block)

if score >= best_score + self._settings.WEIGHT_TOL:
if score > best_score:
# We have a new winner candidate.
self.update_score_and_mark_as_the_best_chain_if_possible(block)
# As `update_score_and_mark_as_the_best_chain_if_possible` may affect `voided_by`,
@@ -285,28 +286,30 @@ def update_score_and_mark_as_the_best_chain_if_possible(self, block: Block) -> None:
self.update_score_and_mark_as_the_best_chain(block)
self.remove_voided_by_from_chain(block)

best_score: int
if self.update_voided_by_from_parents(block):
storage = block.storage
heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()]
best_score = 0.0
best_score = 0
best_heads: list[Block]
for head in heads:
head_meta = head.get_metadata(force_reload=True)
if head_meta.score <= best_score - self._settings.WEIGHT_TOL:
if head_meta.score < best_score:
continue

if head_meta.score >= best_score + self._settings.WEIGHT_TOL:
if head_meta.score > best_score:
best_heads = [head]
best_score = head_meta.score
else:
assert abs(best_score - head_meta.score) < 1e-10
assert best_score == head_meta.score
best_heads.append(head)
assert isinstance(best_score, (int, float)) and best_score > 0
assert isinstance(best_score, int) and best_score > 0

assert len(best_heads) > 0
first_block = self._find_first_parent_in_best_chain(best_heads[0])
self.add_voided_by_to_multiple_chains(best_heads[0], [block], first_block)
if len(best_heads) == 1:
assert best_heads[0].hash != block.hash
self.update_score_and_mark_as_the_best_chain_if_possible(best_heads[0])

def update_score_and_mark_as_the_best_chain(self, block: Block) -> None:
@@ -444,7 +447,7 @@ def remove_first_block_markers(self, block: Block) -> None:
self.context.save(tx)

def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
mark_as_best_chain: bool, newest_timestamp: int) -> float:
mark_as_best_chain: bool, newest_timestamp: int) -> int:
""" Internal method to run a DFS. It is used by `calculate_score()`.
"""
assert block.storage is not None
@@ -453,7 +456,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
storage = block.storage

from hathor.transaction import Block
score = block.weight
score = weight_to_work(block.weight)
for parent in block.get_parents():
if parent.is_block:
assert isinstance(parent, Block)
@@ -462,7 +465,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
x = meta.score
else:
x = self._score_block_dfs(parent, used, mark_as_best_chain, newest_timestamp)
score = sum_weights(score, x)
score += x

else:
from hathor.transaction.storage.traversal import BFSTimestampWalk
@@ -487,7 +490,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
meta.first_block = block.hash
self.context.save(tx)

score = sum_weights(score, tx.weight)
score += weight_to_work(tx.weight)

# Always save the score when it is calculated.
meta = block.get_metadata()
@@ -499,12 +502,12 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
# Thus, if we have already calculated it, we just check the consistency of the calculation.
# Unfortunately we may have to calculate it more than once when a new block arrives in a side
# side because the `first_block` points only to the best chain.
assert abs(meta.score - score) < 1e-10, \
assert meta.score == score, \
'hash={} meta.score={} score={}'.format(block.hash.hex(), meta.score, score)

return score

def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> float:
def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> int:
""" Calculate block's score, which is the accumulated work of the verified transactions and blocks.
:param: mark_as_best_chain: If `True`, the transactions' will point `meta.first_block` to
@@ -514,9 +517,9 @@ def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) ->
if block.is_genesis:
if mark_as_best_chain:
meta = block.get_metadata()
meta.score = block.weight
meta.score = weight_to_work(block.weight)
self.context.save(block)
return block.weight
return weight_to_work(block.weight)

parent = self._find_first_parent_in_best_chain(block)
newest_timestamp = parent.timestamp
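Taken together, the block_consensus.py changes above swap log-space accumulation (sum_weights) and tolerance-based comparisons (WEIGHT_TOL, abs(...) < 1e-10) for plain integer addition and exact equality. The snippet below is an illustration of that difference, not repository code: sum_weights is written here in its conceptual form and weight_to_work uses the same 2**weight assumption as the sketch above.

from math import log2

def sum_weights(a: float, b: float) -> float:
    # Conceptual log-space addition used before this commit:
    # the weight whose work equals the sum of the two works.
    return log2(2 ** a + 2 ** b)

def weight_to_work(weight: float) -> int:
    return round(2 ** weight)

# Before: float scores accumulate in log space and need a tolerance to compare.
old_score = sum_weights(60.0, 60.0)
assert abs(old_score - 61.0) < 1e-10

# After: integer work accumulates with "+" and compares exactly.
new_score = weight_to_work(60.0) + weight_to_work(60.0)
assert new_score == 2 ** 61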
9 changes: 5 additions & 4 deletions hathor/consensus/transaction_consensus.py
@@ -17,8 +17,9 @@
from structlog import get_logger

from hathor.conf.get_settings import get_global_settings
from hathor.transaction import BaseTransaction, Block, Transaction, TxInput, sum_weights
from hathor.transaction import BaseTransaction, Block, Transaction, TxInput
from hathor.util import classproperty
from hathor.utils.weight import weight_to_work

if TYPE_CHECKING:
from hathor.consensus.context import ConsensusAlgorithmContext
@@ -193,13 +194,13 @@ def update_voided_info(self, tx: Transaction) -> None:
continue
tx2 = tx.storage.get_transaction(h)
tx2_meta = tx2.get_metadata()
tx2_meta.accumulated_weight = sum_weights(tx2_meta.accumulated_weight, tx.weight)
tx2_meta.accumulated_weight += weight_to_work(tx.weight)
self.context.save(tx2)

# Then, we add ourselves.
meta = tx.get_metadata()
assert not meta.voided_by or meta.voided_by == {tx.hash}
assert meta.accumulated_weight == tx.weight
assert meta.accumulated_weight == weight_to_work(tx.weight)
if tx.hash in self.context.consensus.soft_voided_tx_ids:
voided_by.add(self._settings.SOFT_VOIDED_ID)
voided_by.add(tx.hash)
@@ -298,7 +299,7 @@ def check_conflicts(self, tx: Transaction) -> None:
candidate.update_accumulated_weight(stop_value=meta.accumulated_weight)
tx_meta = candidate.get_metadata()
d = tx_meta.accumulated_weight - meta.accumulated_weight
if abs(d) < self._settings.WEIGHT_TOL:
if d == 0:
tie_list.append(candidate)
elif d > 0:
is_highest = False
2 changes: 2 additions & 0 deletions hathor/event/model/event_data.py
@@ -55,6 +55,8 @@ class TxMetadata(BaseModel, extra=Extra.ignore):
twins: list[str]
accumulated_weight: float
score: float
accumulated_weight_raw: str
score_raw: str
first_block: Optional[str]
height: int
validation: str
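The event model keeps the existing float fields and adds *_raw string counterparts. The dictionary below is purely illustrative (the field values and the compatibility rationale are assumptions, not taken from the repository); it shows how the new string fields would relate to the legacy floats under the 2**weight reading.

example_tx_metadata = {
    'accumulated_weight': 10.0,        # legacy float weight field, still present
    'score': 12.0,                     # legacy float weight field, still present
    'accumulated_weight_raw': '1024',  # integer work serialized as a string (2**10)
    'score_raw': '4096',               # integer work serialized as a string (2**12)
    'first_block': None,
    'height': 0,
    'validation': 'full',
}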
12 changes: 7 additions & 5 deletions hathor/manager.py
@@ -53,14 +53,15 @@
from hathor.reactor import ReactorProtocol as Reactor
from hathor.reward_lock import is_spent_reward_locked
from hathor.stratum import StratumFactory
from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights
from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion
from hathor.transaction.exceptions import TxValidationError
from hathor.transaction.storage.exceptions import TransactionDoesNotExist
from hathor.transaction.storage.transaction_storage import TransactionStorage
from hathor.transaction.storage.tx_allow_scope import TxAllowScope
from hathor.transaction.vertex_parser import VertexParser
from hathor.types import Address, VertexId
from hathor.util import EnvironmentInfo, LogDuration, Random, calculate_min_significant_weight, not_none
from hathor.util import EnvironmentInfo, LogDuration, Random, not_none
from hathor.utils.weight import calculate_min_significant_weight, weight_to_work
from hathor.verification.verification_service import VerificationService
from hathor.vertex_handler import VertexHandler
from hathor.wallet import BaseWallet
@@ -823,8 +824,8 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
timestamp = min(max(current_timestamp, timestamp_min), timestamp_max)
parent_block_metadata = parent_block.get_metadata()
# this is the min weight to cause an increase of twice the WEIGHT_TOL, we make sure to generate a template with
# at least this weight (note that the user of the API can set its own weight, the block sumit API will also
# protect agains a weight that is too small but using WEIGHT_TOL instead of 2*WEIGHT_TOL)
# at least this weight (note that the user of the API can set its own weight, the block submit API will also
# protect against a weight that is too small but using WEIGHT_TOL instead of 2*WEIGHT_TOL)
min_significant_weight = calculate_min_significant_weight(
parent_block_metadata.score,
2 * self._settings.WEIGHT_TOL
@@ -844,6 +845,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
assert 1 <= len(parents) <= 3, 'Impossible number of parents'
if __debug__ and len(parents) == 3:
assert len(parents_any) == 0, 'Extra parents to choose from that cannot be chosen'
score = parent_block_metadata.score + weight_to_work(weight)
return BlockTemplate(
versions={TxVersion.REGULAR_BLOCK.value, TxVersion.MERGE_MINED_BLOCK.value},
reward=self.daa.get_tokens_issued_per_block(height),
Expand All @@ -854,7 +856,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
parents=parents,
parents_any=parents_any,
height=height,
score=sum_weights(parent_block_metadata.score, weight),
score=score,
signal_bits=self._bit_signaling_service.generate_signal_bits(block=parent_block)
)

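In manager.py the block-template score becomes the parent's integer score plus the work of the chosen block weight, replacing sum_weights over two floats. The helper below is a condensed sketch of that computation; the function name is ours, while the import is the module this commit introduces (assumes a hathor-core checkout with this change installed).

from hathor.utils.weight import weight_to_work

def template_score(parent_score: int, block_weight: float) -> int:
    # Mirrors the new line in _make_block_template: chain work so far plus the
    # work implied by the weight chosen for the candidate block.
    # e.g. template_score(1024, 10.0) == 2048 if weight_to_work(10.0) == 1024.
    return parent_score + weight_to_work(block_weight)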
2 changes: 1 addition & 1 deletion hathor/mining/block_template.py
@@ -36,7 +36,7 @@ class BlockTemplate(NamedTuple):
parents: list[bytes] # required parents, will always have a block and at most 2 txs
parents_any: list[bytes] # list of extra parents to choose from when there are more options
height: int # metadata
score: float # metadata
score: int # metadata
signal_bits: int # signal bits for blocks generated from this template

def generate_minimally_valid_block(self) -> BaseTransaction:
17 changes: 10 additions & 7 deletions hathor/transaction/base_transaction.py
@@ -37,6 +37,7 @@
from hathor.transaction.validation_state import ValidationState
from hathor.types import TokenUid, TxOutputScript, VertexId
from hathor.util import classproperty
from hathor.utils.weight import weight_to_work

if TYPE_CHECKING:
from _hashlib import HASH
@@ -625,11 +626,12 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True)
metadata = self.storage.get_metadata(self.hash)
self._metadata = metadata
if not metadata:
score = self.weight if self.is_genesis else 0
score = weight_to_work(self.weight) if self.is_genesis else 0
accumulated_weight = weight_to_work(self.weight)
metadata = TransactionMetadata(
settings=self._settings,
hash=self._hash,
accumulated_weight=self.weight,
accumulated_weight=accumulated_weight,
score=score,
)
self._metadata = metadata
@@ -644,10 +646,11 @@ def reset_metadata(self) -> None:
"""
from hathor.transaction.transaction_metadata import ValidationState
assert self.storage is not None
score = self.weight if self.is_genesis else 0
score = weight_to_work(self.weight) if self.is_genesis else 0
accumulated_weight = weight_to_work(self.weight)
self._metadata = TransactionMetadata(hash=self._hash,
score=score,
accumulated_weight=self.weight)
accumulated_weight=accumulated_weight)
if self.is_genesis:
self._metadata.validation = ValidationState.CHECKPOINT_FULL
self._metadata.voided_by = set()
@@ -679,7 +682,7 @@ def update_accumulated_weight(self, *, stop_value: float = inf, save_file: bool
if metadata.accumulated_weight > stop_value:
return metadata

accumulated_weight = self.weight
accumulated_weight = weight_to_work(self.weight)

# TODO Another optimization is that, when we calculate the acc weight of a transaction, we
# also partially calculate the acc weight of its descendants. If it were a DFS, when returning
@@ -694,7 +697,7 @@ def update_accumulated_weight(self, *, stop_value: float = inf, save_file: bool
from hathor.transaction.storage.traversal import BFSTimestampWalk
bfs_walk = BFSTimestampWalk(self.storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=True)
for tx in bfs_walk.run(self, skip_root=True):
accumulated_weight = sum_weights(accumulated_weight, tx.weight)
accumulated_weight += weight_to_work(tx.weight)
if accumulated_weight > stop_value:
break

@@ -729,7 +732,7 @@ def _update_parents_children_metadata(self) -> None:
def _update_initial_accumulated_weight(self) -> None:
"""Update the vertex initial accumulated_weight."""
metadata = self.get_metadata()
metadata.accumulated_weight = self.weight
metadata.accumulated_weight = weight_to_work(self.weight)

def update_timestamp(self, now: int) -> None:
"""Update this tx's timestamp
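base_transaction.py now seeds fresh metadata with integer work: every vertex starts with its own work as accumulated_weight, while score starts non-zero only for genesis. The helper below condenses that initialization logic; the function is ours, for illustration, and only the two assignments mirror the diff.

from hathor.utils.weight import weight_to_work

def initial_metadata_values(weight: float, is_genesis: bool) -> tuple[int, int]:
    # Mirrors get_metadata() / reset_metadata() after this commit.
    score = weight_to_work(weight) if is_genesis else 0
    accumulated_weight = weight_to_work(weight)
    return score, accumulated_weight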
4 changes: 2 additions & 2 deletions hathor/transaction/resources/decode_tx.py
@@ -149,8 +149,8 @@ def render_GET(self, request):
'conflict_with': [],
'voided_by': [],
'twins': [],
'accumulated_weight': 10,
'score': 12,
'accumulated_weight': '1024',
'score': '4096',
'first_block': None
},
'spent_outputs': {
4 changes: 2 additions & 2 deletions hathor/transaction/resources/transaction.py
@@ -427,8 +427,8 @@ def get_list_tx(self, request):
'conflict_with': [],
'voided_by': [],
'twins': [],
'accumulated_weight': 10,
'score': 12,
'accumulated_weight': '1024',
'score': '4096',
'first_block': None
},
'spent_outputs': {
2 changes: 1 addition & 1 deletion hathor/transaction/resources/transaction_confirmation.py
@@ -125,7 +125,7 @@ def render_GET(self, request):
'success': {
'summary': 'Success',
'value': {
'accumulated_weight': 15.4,
'accumulated_weight': 43237,
'confirmation_level': 0.88,
'stop_value': 14.5,
'accumulated_bigger': True,
@@ -0,0 +1,33 @@
# Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import TYPE_CHECKING

from hathor.transaction.storage.migrations import BaseMigration

if TYPE_CHECKING:
from hathor.transaction.storage import TransactionStorage


class Migration(BaseMigration):
def skip_empty_db(self) -> bool:
return True

def get_db_name(self) -> str:
return 'change_score_acc_weight_metadata'

def run(self, storage: 'TransactionStorage') -> None:
raise Exception('Cannot migrate your database due to an incompatible change in the metadata. '
'Please, delete your data folder and use the latest available snapshot or sync '
'from beginning.')