Skip to content

Commit

Permalink
Merge pull request #439 from HathorNetwork/dev
Browse files Browse the repository at this point in the history
Release v0.49.1
  • Loading branch information
jansegre authored Jul 21, 2022
2 parents d39fd0d + 1e8eb5d commit da5c19b
Show file tree
Hide file tree
Showing 11 changed files with 56 additions and 77 deletions.
14 changes: 7 additions & 7 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,13 @@ jobs:
full_matrix = {
'python': ['3.7', '3.8', '3.9', '3.10'],
# available OS's: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idruns-on
# XXX: ubuntu-22.04 comes with an OpenSSL version that has disabled RIPEMD-160, this breaks hathor.crypto.util.get_hash160, which requires code workarounds
'os': ['ubuntu-20.04', 'macos-12', 'windows-2022'],
# XXX: tests fail on these, not sure why; when running them individually each one passes, but not on `make tests`
# 'include': [
# {'os': 'ubuntu-22.04', 'python': 'pypy-3.7'},
# {'os': 'ubuntu-22.04', 'python': 'pypy-3.8'},
# ],
'os': ['ubuntu-22.04', 'macos-12', 'windows-2022'],
'include': [
{'os': 'ubuntu-20.04', 'python': '3.7'},
# XXX: tests fail on these, not sure why; when running them individually each one passes, but not on `make tests`
# {'os': 'ubuntu-22.04', 'python': 'pypy-3.7'},
# {'os': 'ubuntu-22.04', 'python': 'pypy-3.8'},
],
}
# this is the fastest one:
reduced_matrix = {
Expand Down
2 changes: 1 addition & 1 deletion hathor/cli/openapi_files/openapi_base.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
],
"info": {
"title": "Hathor API",
"version": "0.49.0"
"version": "0.49.1"
},
"consumes": [
"application/json"
Expand Down
3 changes: 3 additions & 0 deletions hathor/conf/mainnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,9 @@
cp(2_100_000, bytes.fromhex('00000000000000000c96c02d514017263d4e624a61fb9f10babcbf8d4632b67b')),
cp(2_200_000, bytes.fromhex('00000000000000001016a7bbb6ccfc957ba6d29a562b43e8620f57ddc9147dde')),
cp(2_300_000, bytes.fromhex('0000000000000000164dafd8d922c783a99d83f66220eb7c54f11bee1aaac126')),
cp(2_400_000, bytes.fromhex('0000000000000000067aa4bf7306dadf0f56e38380327a472f55e7be72fbe7da')),
cp(2_500_000, bytes.fromhex('00000000000000000c418b03ceb3a4fe7023674811f8ec94d7b9d5b1879ddc28')),
cp(2_600_000, bytes.fromhex('0000000000000000020af703e2955e3f7934e8bc376da2ba6cfc6dc609feaf84')),
],
SOFT_VOIDED_TX_IDS=list(map(bytes.fromhex, [
'0000000012a922a6887497bed9c41e5ed7dc7213cae107db295602168266cd02',
Expand Down
29 changes: 18 additions & 11 deletions hathor/crypto/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,17 +49,24 @@ def get_private_key_from_bytes(private_key_bytes: bytes,
return not_none(load_der_private_key(private_key_bytes, password, _BACKEND))


def get_hash160(public_key_bytes: bytes) -> bytes:
"""The input is hashed twice: first with SHA-256 and then with RIPEMD-160
:type: bytes
:rtype: bytes
"""
key_hash = hashlib.sha256(public_key_bytes)
h = hashlib.new('ripemd160')
h.update(key_hash.digest())
return h.digest()
try:
hashlib.new('ripemd160', b'')
except Exception:
# XXX: the source says "Test-only pure Python RIPEMD160 implementation", however for our case this is acceptable
# for more details see: https://github.com/bitcoin/bitcoin/pull/23716/files which has a copy of the same code
import pycoin.contrib.ripemd160

def get_hash160(public_key_bytes: bytes) -> bytes:
"""The input is hashed twice: first with SHA-256 and then with RIPEMD-160"""
key_hash = hashlib.sha256(public_key_bytes)
return pycoin.contrib.ripemd160.ripemd160(key_hash.digest())
else:
def get_hash160(public_key_bytes: bytes) -> bytes:
"""The input is hashed twice: first with SHA-256 and then with RIPEMD-160"""
key_hash = hashlib.sha256(public_key_bytes)
h = hashlib.new('ripemd160')
h.update(key_hash.digest())
return h.digest()


def get_address_from_public_key(public_key):
Expand Down
3 changes: 3 additions & 0 deletions hathor/transaction/storage/cache_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,9 @@ def __init__(self, store: 'BaseTransactionStorage', reactor: Reactor, interval:
transaction/blocks/metadata when returning those objects.
:type _clone_if_needed: bool
"""
if store.with_index:
raise ValueError('internal storage cannot have indexes enabled')

store.remove_cache()
self.store = store
self.reactor = reactor
Expand Down
12 changes: 11 additions & 1 deletion hathor/transaction/storage/transaction_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,9 @@ def __init__(self):
# Internal toggle to choose when to select topological DFS iterator, used only on some tests
self._always_use_topological_dfs = False

# Only used in self.add_to_indexes to bypass raising an exception
self._saving_genesis = False

@abstractmethod
def _update_caches(self, block_count: int, tx_count: int, latest_timestamp: int, first_timestamp: int) -> None:
"""Update ephemeral caches, should only be used internally."""
Expand Down Expand Up @@ -190,6 +193,7 @@ def get_best_block(self) -> Block:

def _save_or_verify_genesis(self) -> None:
"""Save all genesis in the storage."""
self._saving_genesis = True
for tx in self._get_genesis_from_settings():
try:
assert tx.hash is not None
Expand All @@ -201,6 +205,7 @@ def _save_or_verify_genesis(self) -> None:
tx2 = tx
assert tx2.hash is not None
self._genesis_cache[tx2.hash] = tx2
self._saving_genesis = False

def _get_genesis_from_settings(self) -> List[BaseTransaction]:
"""Return all genesis from settings."""
Expand Down Expand Up @@ -1019,7 +1024,12 @@ def _run_topological_sort_dfs(self, root: BaseTransaction, visited: Dict[bytes,

def add_to_indexes(self, tx: BaseTransaction) -> None:
if not self.with_index:
raise NotImplementedError
if self._saving_genesis:
# XXX: avoid failing in some situations where this is called before we know it's OK to skip
# see: https://github.com/HathorNetwork/hathor-core/pull/436
return
else:
raise NotImplementedError
assert self.indexes is not None
self._latest_timestamp = max(self.latest_timestamp, tx.timestamp)
if self._first_timestamp == 0:
Expand Down
2 changes: 1 addition & 1 deletion hathor/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = '0.49.0'
__version__ = '0.49.1'
54 changes: 5 additions & 49 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

[tool.poetry]
name = "hathor"
version = "0.49.0"
version = "0.49.1"
description = "Hathor Network full-node"
authors = ["Hathor Team <contact@hathor.network>"]
license = "Apache-2.0"
Expand Down Expand Up @@ -61,7 +61,7 @@ ipython = "<8" # ipython 8.0 drops compatibility with Python 3.7
mnemonic = "~0.20"
prometheus_client = "~0.14.1"
pyopenssl = "=22.0.0"
pycoin = "<=0.90.20200322"
pycoin = "~0.92"
pywin32 = {version = "304", markers = "sys_platform == 'win32'"}
requests = "=2.27.1"
service_identity = "~21.1.0"
Expand Down
2 changes: 1 addition & 1 deletion tests/tx/test_cache_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ class BaseCacheStorageTest(unittest.TestCase):
def setUp(self):
super().setUp()

store = TransactionMemoryStorage()
store = TransactionMemoryStorage(with_index=False)
self.cache_storage = TransactionCacheStorage(store, self.clock, capacity=5)
self.cache_storage._manually_initialize()
self.cache_storage.pre_init()
Expand Down
8 changes: 4 additions & 4 deletions tests/tx/test_tx_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -473,7 +473,7 @@ class CacheBinaryStorageTest(BaseCacheStorageTest):

def setUp(self):
self.directory = tempfile.mkdtemp()
store = TransactionBinaryStorage(self.directory)
store = TransactionBinaryStorage(self.directory, with_index=False)
reactor = MemoryReactorHeapClock()
super().setUp(TransactionCacheStorage(store, reactor, capacity=5))

Expand All @@ -489,7 +489,7 @@ def setUp(self):
self.directory = tempfile.mkdtemp()
# Creating random file just to test specific part of code
tempfile.NamedTemporaryFile(dir=self.directory, delete=True)
store = TransactionCompactStorage(self.directory)
store = TransactionCompactStorage(self.directory, with_index=False)
reactor = MemoryReactorHeapClock()
super().setUp(TransactionCacheStorage(store, reactor, capacity=5))

Expand All @@ -509,7 +509,7 @@ class CacheMemoryStorageTest(BaseCacheStorageTest):
__test__ = True

def setUp(self):
store = TransactionMemoryStorage()
store = TransactionMemoryStorage(with_index=False)
reactor = MemoryReactorHeapClock()
super().setUp(TransactionCacheStorage(store, reactor, capacity=5))

Expand Down Expand Up @@ -537,7 +537,7 @@ class CacheRocksDBStorageTest(BaseCacheStorageTest):

def setUp(self):
self.directory = tempfile.mkdtemp()
store = TransactionRocksDBStorage(self.directory)
store = TransactionRocksDBStorage(self.directory, with_index=False)
reactor = MemoryReactorHeapClock()
super().setUp(TransactionCacheStorage(store, reactor, capacity=5))

Expand Down

0 comments on commit da5c19b

Please sign in to comment.