Commit 464fb5b

feat(consensus): Change meta.score and meta.accumulated_weight to int type

msbrogli committed Nov 11, 2023
1 parent 090bd71 commit 464fb5b
Showing 24 changed files with 132 additions and 112 deletions.
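Note: the hunks below replace the log2-scale float representation (combined with sum_weights) with plain integer work, where a vertex of weight w contributes 2**w units. A minimal sketch of the equivalence, assuming the removed sum_weights helper combined weights in the log2 domain (log_sum_weights below is a hypothetical stand-in, not the real helper):

```python
import math

def log_sum_weights(w1: float, w2: float) -> float:
    # Hypothetical stand-in for the old float helper: combine two weights
    # in the log2 domain, i.e. log2(2**w1 + 2**w2).
    return math.log2(2**w1 + 2**w2)

# Old representation: score kept as a log2-scale float.
old_score = log_sum_weights(60.0, 60.0)    # 61.0

# New representation: score kept as the integer amount of work.
new_score = int(2**60.0) + int(2**60.0)    # 2**61

# Both encode the same total work; the integer form allows exact comparisons
# instead of float comparisons guarded by a tolerance such as WEIGHT_TOL.
assert math.isclose(old_score, math.log2(new_score))
```

This is why every `sum_weights(x, v.weight)` in the diff becomes `x += int(2**v.weight)`, and every tolerance-based comparison becomes an exact `==`, `<` or `>`.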
43 changes: 23 additions & 20 deletions hathor/consensus/block_consensus.py
@@ -19,7 +19,7 @@

from hathor.conf.get_settings import get_settings
from hathor.profiler import get_cpu_profiler
from hathor.transaction import BaseTransaction, Block, Transaction, sum_weights
from hathor.transaction import BaseTransaction, Block, Transaction
from hathor.util import classproperty, not_none

if TYPE_CHECKING:
@@ -120,7 +120,7 @@ def update_voided_info(self, block: Block) -> None:
for h in voided_by:
tx = storage.get_transaction(h)
tx_meta = tx.get_metadata()
tx_meta.accumulated_weight = sum_weights(tx_meta.accumulated_weight, block.weight)
tx_meta.accumulated_weight += int(2**block.weight)
self.context.save(tx)

# Check conflicts of the transactions voiding us.
@@ -159,10 +159,10 @@ def update_voided_info(self, block: Block) -> None:
is_connected_to_the_best_chain=is_connected_to_the_best_chain)

# First, void this block.
# We need to void this block first, because otherwise it would always be one of the heads.
self.mark_as_voided(block, skip_remove_first_block_markers=True)

# Get the score of the best chains.
# We need to void this block first, because otherwise it would always be one of the heads.
heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()]
best_score = None
for head in heads:
@@ -171,15 +171,16 @@ def update_voided_info(self, block: Block) -> None:
best_score = head_meta.score
else:
# All heads must have the same score.
assert abs(best_score - head_meta.score) < 1e-10
assert isinstance(best_score, (int, float))
assert best_score == head_meta.score
assert isinstance(best_score, int)
assert best_score > 0

# Calculate the score.
# We cannot calculate score before getting the heads.
score = self.calculate_score(block)

# Finally, check who the winner is.
if score <= best_score - self._settings.WEIGHT_TOL:
if score < best_score:
# Just update voided_by from parents.
self.update_voided_by_from_parents(block)

@@ -200,7 +201,7 @@ def update_voided_info(self, block: Block) -> None:
common_block = self._find_first_parent_in_best_chain(block)
self.add_voided_by_to_multiple_chains(block, heads, common_block)

if score >= best_score + self._settings.WEIGHT_TOL:
if score > best_score:
# We have a new winner candidate.
self.update_score_and_mark_as_the_best_chain_if_possible(block)
# As `update_score_and_mark_as_the_best_chain_if_possible` may affect `voided_by`,
@@ -287,28 +288,30 @@ def update_score_and_mark_as_the_best_chain_if_possible(self, block: Block) -> N
self.update_score_and_mark_as_the_best_chain(block)
self.remove_voided_by_from_chain(block)

best_score: int
if self.update_voided_by_from_parents(block):
storage = block.storage
heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()]
best_score = 0.0
best_score = 0
best_heads: list[Block]
for head in heads:
head_meta = head.get_metadata(force_reload=True)
if head_meta.score <= best_score - self._settings.WEIGHT_TOL:
if head_meta.score < best_score:
continue

if head_meta.score >= best_score + self._settings.WEIGHT_TOL:
if head_meta.score > best_score:
best_heads = [head]
best_score = head_meta.score
else:
assert abs(best_score - head_meta.score) < 1e-10
assert best_score == head_meta.score
best_heads.append(head)
assert isinstance(best_score, (int, float)) and best_score > 0
assert isinstance(best_score, int) and best_score > 0

assert len(best_heads) > 0
first_block = self._find_first_parent_in_best_chain(best_heads[0])
self.add_voided_by_to_multiple_chains(best_heads[0], [block], first_block)
if len(best_heads) == 1:
assert best_heads[0].hash != block.hash
self.update_score_and_mark_as_the_best_chain_if_possible(best_heads[0])

def update_score_and_mark_as_the_best_chain(self, block: Block) -> None:
@@ -448,7 +451,7 @@ def remove_first_block_markers(self, block: Block) -> None:
self.context.save(tx)

def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
mark_as_best_chain: bool, newest_timestamp: int) -> float:
mark_as_best_chain: bool, newest_timestamp: int) -> int:
""" Internal method to run a DFS. It is used by `calculate_score()`.
"""
assert block.storage is not None
@@ -458,7 +461,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
storage = block.storage

from hathor.transaction import Block
score = block.weight
score = int(2**block.weight)
for parent in block.get_parents():
if parent.is_block:
assert isinstance(parent, Block)
@@ -467,7 +470,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
x = meta.score
else:
x = self._score_block_dfs(parent, used, mark_as_best_chain, newest_timestamp)
score = sum_weights(score, x)
score += x

else:
from hathor.transaction.storage.traversal import BFSTimestampWalk
@@ -493,7 +496,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
meta.first_block = block.hash
self.context.save(tx)

score = sum_weights(score, tx.weight)
score += int(2**tx.weight)

# Always save the score when it is calculated.
meta = block.get_metadata()
@@ -505,12 +508,12 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes],
# Thus, if we have already calculated it, we just check the consistency of the calculation.
# Unfortunately we may have to calculate it more than once when a new block arrives in a side
# side because the `first_block` points only to the best chain.
assert abs(meta.score - score) < 1e-10, \
assert meta.score == score, \
'hash={} meta.score={} score={}'.format(block.hash.hex(), meta.score, score)

return score

def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> float:
def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> int:
""" Calculate block's score, which is the accumulated work of the verified transactions and blocks.
:param: mark_as_best_chain: If `True`, the transactions' will point `meta.first_block` to
@@ -520,9 +523,9 @@ def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) ->
if block.is_genesis:
if mark_as_best_chain:
meta = block.get_metadata()
meta.score = block.weight
meta.score = int(2**block.weight)
self.context.save(block)
return block.weight
return int(2**block.weight)

parent = self._find_first_parent_in_best_chain(block)
newest_timestamp = parent.timestamp
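With integer scores, the best-chain decision in block_consensus.py above no longer needs the WEIGHT_TOL tolerance. A rough sketch of the three outcomes (not the real method, which also updates voided_by metadata and saves it through the consensus context):

```python
def pick_winner(score: int, best_score: int) -> str:
    # Exact integer comparisons replace the WEIGHT_TOL-guarded float checks.
    if score < best_score:
        return 'lose'   # keep the block voided; only update voided_by from parents
    if score > best_score:
        return 'win'    # new best-chain candidate
    return 'tie'        # chains with exactly equal work stay voided by each other

assert pick_winner(2**61, 2**61 + 1) == 'lose'
assert pick_winner(2**61 + 1, 2**61) == 'win'
assert pick_winner(2**61, 2**61) == 'tie'
```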
8 changes: 4 additions & 4 deletions hathor/consensus/transaction_consensus.py
@@ -18,7 +18,7 @@

from hathor.conf.get_settings import get_settings
from hathor.profiler import get_cpu_profiler
from hathor.transaction import BaseTransaction, Block, Transaction, TxInput, sum_weights
from hathor.transaction import BaseTransaction, Block, Transaction, TxInput
from hathor.util import classproperty

if TYPE_CHECKING:
@@ -199,13 +199,13 @@ def update_voided_info(self, tx: Transaction) -> None:
continue
tx2 = tx.storage.get_transaction(h)
tx2_meta = tx2.get_metadata()
tx2_meta.accumulated_weight = sum_weights(tx2_meta.accumulated_weight, tx.weight)
tx2_meta.accumulated_weight += int(2**tx.weight)
self.context.save(tx2)

# Then, we add ourselves.
meta = tx.get_metadata()
assert not meta.voided_by or meta.voided_by == {tx.hash}
assert meta.accumulated_weight == tx.weight
assert meta.accumulated_weight == int(2**tx.weight)
if tx.hash in self.context.consensus.soft_voided_tx_ids:
voided_by.add(self._settings.SOFT_VOIDED_ID)
voided_by.add(tx.hash)
@@ -300,7 +300,7 @@ def check_conflicts(self, tx: Transaction) -> None:
candidate.update_accumulated_weight(stop_value=meta.accumulated_weight)
tx_meta = candidate.get_metadata()
d = tx_meta.accumulated_weight - meta.accumulated_weight
if abs(d) < self._settings.WEIGHT_TOL:
if d == 0:
tie_list.append(candidate)
elif d > 0:
is_highest = False
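The same idea applies to transactions: accumulated_weight becomes the integer sum of 2**weight over the transaction and everything that confirms it, and a conflict is a tie only when the difference is exactly zero. A small illustrative sketch (hypothetical helper, not the real API):

```python
def accumulated_weight(own_weight: float, confirming_weights: list[float]) -> int:
    # Own work plus the work of every vertex that confirms this transaction.
    total = int(2**own_weight)
    for w in confirming_weights:
        total += int(2**w)
    return total

# Conflict resolution in check_conflicts: d == 0 is now an exact tie,
# replacing abs(d) < WEIGHT_TOL.
a = accumulated_weight(20.0, [60.0, 60.5])
b = accumulated_weight(20.0, [60.5, 60.0])
print(a - b == 0)  # True: same set of confirming work, exact tie
```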
4 changes: 2 additions & 2 deletions hathor/event/model/event_data.py
@@ -52,8 +52,8 @@ class TxMetadata(BaseModel, extra=Extra.ignore):
received_by: list[int]
children: list[str]
twins: list[str]
accumulated_weight: float
score: float
accumulated_weight: int
score: int
first_block: Optional[str]
height: int
validation: str
24 changes: 5 additions & 19 deletions hathor/manager.py
@@ -51,13 +51,13 @@
from hathor.profiler import get_cpu_profiler
from hathor.pubsub import HathorEvents, PubSubManager
from hathor.stratum import StratumFactory
from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights
from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion
from hathor.transaction.exceptions import TxValidationError
from hathor.transaction.storage import TransactionStorage
from hathor.transaction.storage.exceptions import TransactionDoesNotExist
from hathor.transaction.storage.tx_allow_scope import TxAllowScope
from hathor.types import Address, VertexId
from hathor.util import EnvironmentInfo, LogDuration, Random, Reactor, calculate_min_significant_weight, not_none
from hathor.util import EnvironmentInfo, LogDuration, Random, Reactor, not_none
from hathor.verification.verification_service import VerificationService
from hathor.wallet import BaseWallet

@@ -794,14 +794,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
)
timestamp = min(max(current_timestamp, timestamp_min), timestamp_max)
parent_block_metadata = parent_block.get_metadata()
# this is the min weight to cause an increase of twice the WEIGHT_TOL, we make sure to generate a template with
# at least this weight (note that the user of the API can set its own weight, the block sumit API will also
# protect agains a weight that is too small but using WEIGHT_TOL instead of 2*WEIGHT_TOL)
min_significant_weight = calculate_min_significant_weight(
parent_block_metadata.score,
2 * self._settings.WEIGHT_TOL
)
weight = max(self.daa.calculate_next_weight(parent_block, timestamp), min_significant_weight)
weight = self.daa.calculate_next_weight(parent_block, timestamp)
height = parent_block.get_height() + 1
parents = [parent_block.hash] + parent_txs.must_include
parents_any = parent_txs.can_include
@@ -813,6 +806,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
assert 1 <= len(parents) <= 3, 'Impossible number of parents'
if __debug__ and len(parents) == 3:
assert len(parents_any) == 0, 'Extra parents to choose from that cannot be chosen'
score = parent_block_metadata.score + int(2**weight)
return BlockTemplate(
versions={TxVersion.REGULAR_BLOCK.value, TxVersion.MERGE_MINED_BLOCK.value},
reward=self.daa.get_tokens_issued_per_block(height),
@@ -823,7 +817,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
parents=parents,
parents_any=parents_any,
height=height,
score=sum_weights(parent_block_metadata.score, weight),
score=score,
signal_bits=self._bit_signaling_service.generate_signal_bits(block=parent_block)
)

@@ -862,15 +856,7 @@ def submit_block(self, blk: Block, fails_silently: bool = True) -> bool:
if parent_hash not in tips:
self.log.warn('submit_block(): Ignoring block: parent not a tip', blk=blk.hash_hex)
return False
parent_block = self.tx_storage.get_transaction(parent_hash)
parent_block_metadata = parent_block.get_metadata()
# this is the smallest weight that won't cause the score to increase, anything equal or smaller is bad
min_insignificant_weight = calculate_min_significant_weight(
parent_block_metadata.score,
self._settings.WEIGHT_TOL
)
if blk.weight <= min_insignificant_weight:
self.log.warn('submit_block(): insignificant weight? accepted anyway', blk=blk.hash_hex, weight=blk.weight)
return self.propagate_tx(blk, fails_silently=fails_silently)

def push_tx(self, tx: Transaction, allow_non_standard_script: bool = False,
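Under the integer scheme the manager's template score is just the parent's score plus the new block's work, and the min_significant_weight guard is dropped because any accepted weight now adds at least one unit of work to an integer score. A sketch under those assumptions (hypothetical function, not the real manager code):

```python
def template_score(parent_score: int, weight: float) -> int:
    # Integer score of a block mined on a parent whose score is parent_score.
    # int(2**weight) >= 1 for any weight >= 0, so the score strictly grows and
    # the old calculate_min_significant_weight check is no longer needed.
    return parent_score + int(2**weight)

print(template_score(2**61, 60.0) == 2**61 + 2**60)  # True
```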
2 changes: 1 addition & 1 deletion hathor/mining/block_template.py
@@ -33,7 +33,7 @@ class BlockTemplate(NamedTuple):
parents: list[bytes] # required parents, will always have a block and at most 2 txs
parents_any: list[bytes] # list of extra parents to choose from when there are more options
height: int # metadata
score: float # metadata
score: int # metadata
signal_bits: int # signal bits for blocks generated from this template

def generate_minimally_valid_block(self) -> BaseTransaction:
14 changes: 8 additions & 6 deletions hathor/transaction/base_transaction.py
@@ -614,11 +614,12 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True)
# which requires the use of a storage, this is a workaround that should be fixed, places where this
# happens include generating new mining blocks and some tests
height = self.calculate_height() if self.storage else None
score = self.weight if self.is_genesis else 0
score = int(2**self.weight) if self.is_genesis else 1
accumulated_weight = int(2**self.weight)

metadata = TransactionMetadata(
hash=self.hash,
accumulated_weight=self.weight,
accumulated_weight=accumulated_weight,
height=height,
score=score,
min_height=0,
@@ -635,10 +636,11 @@ def reset_metadata(self) -> None:
"""
from hathor.transaction.transaction_metadata import ValidationState
assert self.storage is not None
score = self.weight if self.is_genesis else 0
score = int(2**self.weight) if self.is_genesis else 0
accumulated_weight = int(2**self.weight)
self._metadata = TransactionMetadata(hash=self.hash,
score=score,
accumulated_weight=self.weight)
accumulated_weight=accumulated_weight)
if self.is_genesis:
self._metadata.validation = ValidationState.CHECKPOINT_FULL
self._metadata.voided_by = set()
@@ -672,7 +674,7 @@ def update_accumulated_weight(self, *, stop_value: float = inf, save_file: bool
if metadata.accumulated_weight > stop_value:
return metadata

accumulated_weight = self.weight
accumulated_weight = int(2**self.weight)

# TODO Another optimization is that, when we calculate the acc weight of a transaction, we
# also partially calculate the acc weight of its descendants. If it were a DFS, when returning
@@ -687,7 +689,7 @@ def update_accumulated_weight(self, *, stop_value: float = inf, save_file: bool
from hathor.transaction.storage.traversal import BFSTimestampWalk
bfs_walk = BFSTimestampWalk(self.storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=True)
for tx in bfs_walk.run(self, skip_root=True):
accumulated_weight = sum_weights(accumulated_weight, tx.weight)
accumulated_weight += int(2**tx.weight)
if accumulated_weight > stop_value:
break

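update_accumulated_weight keeps its early-stop behavior, but now accumulates integers. A simplified sketch with a plain list standing in for the BFS walk over the DAG:

```python
def accumulate(own_weight: float, descendant_weights: list[float], stop_value: float) -> int:
    # Sum the work of this vertex and its confirming descendants, stopping
    # once the accumulated work exceeds stop_value (as the real BFS walk does).
    acc = int(2**own_weight)
    for w in descendant_weights:
        acc += int(2**w)
        if acc > stop_value:
            break
    return acc

print(accumulate(10.0, [10.0, 10.0], stop_value=1500.0))  # 2048, stops after one descendant
```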
4 changes: 2 additions & 2 deletions hathor/transaction/resources/decode_tx.py
@@ -147,8 +147,8 @@ def render_GET(self, request):
'conflict_with': [],
'voided_by': [],
'twins': [],
'accumulated_weight': 10,
'score': 12,
'accumulated_weight': 1024,
'score': 4096,
'first_block': None
},
'spent_outputs': {
4 changes: 2 additions & 2 deletions hathor/transaction/resources/transaction.py
@@ -405,8 +405,8 @@ def get_list_tx(self, request):
'conflict_with': [],
'voided_by': [],
'twins': [],
'accumulated_weight': 10,
'score': 12,
'accumulated_weight': 1024,
'score': 4096,
'first_block': None
},
'spent_outputs': {
2 changes: 1 addition & 1 deletion hathor/transaction/resources/transaction_confirmation.py
@@ -125,7 +125,7 @@ def render_GET(self, request):
'success': {
'summary': 'Success',
'value': {
'accumulated_weight': 15.4,
'accumulated_weight': 43237,
'confirmation_level': 0.88,
'stop_value': 14.5,
'accumulated_bigger': True,
@@ -0,0 +1,31 @@
# Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import TYPE_CHECKING

from hathor.transaction.storage.migrations import BaseMigration

if TYPE_CHECKING:
from hathor.transaction.storage import TransactionStorage


class Migration(BaseMigration):
def skip_empty_db(self) -> bool:
return True

def get_db_name(self) -> str:
return 'change_score_acc_weight_metadata'

def run(self, storage: 'TransactionStorage') -> None:
raise Exception('Cannot migrate your database. Please, delete your data folder and sync again.')