From 1440a4cee90079e792a773114cbe3a853040287c Mon Sep 17 00:00:00 2001 From: Chiwon Cho Date: Sat, 29 Feb 2020 23:03:00 +0900 Subject: [PATCH 01/15] VERSION: 1.6.1 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index ce6a70b9d..2eda823ff 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.6.0 \ No newline at end of file +1.6.1 \ No newline at end of file From 5915cc61c47760a663cf50f6590aba5acdff0d63 Mon Sep 17 00:00:00 2001 From: Chiwon Cho Date: Sun, 1 Mar 2020 00:26:11 +0900 Subject: [PATCH 02/15] Adjust the number of transactions in a block * Add new configuration: "BlockInvokeTimeout" * Default timeout: 15s --- iconservice/icon_config.py | 5 ++-- iconservice/icon_constant.py | 5 ++++ iconservice/icon_service_engine.py | 31 ++++++++++++++++++++++-- iconservice/utils/timer.py | 38 ++++++++++++++++++++++++++++++ 4 files changed, 75 insertions(+), 4 deletions(-) create mode 100644 iconservice/utils/timer.py diff --git a/iconservice/icon_config.py b/iconservice/icon_config.py index 1ca60a379..82c969604 100644 --- a/iconservice/icon_config.py +++ b/iconservice/icon_config.py @@ -15,7 +15,7 @@ from .icon_constant import ( ConfigKey, ICX_IN_LOOP, TERM_PERIOD, IISS_DAY_BLOCK, PREP_MAIN_PREPS, PREP_MAIN_AND_SUB_PREPS, PENALTY_GRACE_PERIOD, LOW_PRODUCTIVITY_PENALTY_THRESHOLD, - BLOCK_VALIDATION_PENALTY_THRESHOLD, BACKUP_FILES + BLOCK_VALIDATION_PENALTY_THRESHOLD, BACKUP_FILES, BLOCK_INVOKE_TIMEOUT_S ) default_icon_config = { @@ -61,5 +61,6 @@ ConfigKey.BLOCK_VALIDATION_PENALTY_THRESHOLD: BLOCK_VALIDATION_PENALTY_THRESHOLD, ConfigKey.STEP_TRACE_FLAG: False, ConfigKey.PRECOMMIT_DATA_LOG_FLAG: False, - ConfigKey.BACKUP_FILES: BACKUP_FILES + ConfigKey.BACKUP_FILES: BACKUP_FILES, + ConfigKey.BLOCK_INVOKE_TIMEOUT: BLOCK_INVOKE_TIMEOUT_S } diff --git a/iconservice/icon_constant.py b/iconservice/icon_constant.py index 91ef1dc26..b52711edd 100644 --- a/iconservice/icon_constant.py +++ b/iconservice/icon_constant.py @@ -185,6 +185,9 @@ class ConfigKey: # The maximum number of backup files for rollback BACKUP_FILES = "backupFiles" + # Block invoke timeout in second + BLOCK_INVOKE_TIMEOUT = "blockInvokeTimeout" + class EnableThreadFlag(IntFlag): INVOKE = 1 @@ -306,6 +309,8 @@ class DeployState(IntEnum): BACKUP_FILES = 10 +BLOCK_INVOKE_TIMEOUT_S = 15 + class RCStatus(IntEnum): NOT_READY = 0 diff --git a/iconservice/icon_service_engine.py b/iconservice/icon_service_engine.py index 821546124..192999c95 100644 --- a/iconservice/icon_service_engine.py +++ b/iconservice/icon_service_engine.py @@ -44,7 +44,8 @@ ICON_DEX_DB_NAME, ICON_SERVICE_LOG_TAG, IconServiceFlag, ConfigKey, IISS_METHOD_TABLE, PREP_METHOD_TABLE, NEW_METHOD_TABLE, Revision, BASE_TRANSACTION_INDEX, IISS_DB, IISS_INITIAL_IREP, DEBUG_METHOD_TABLE, PREP_MAIN_PREPS, PREP_MAIN_AND_SUB_PREPS, - ISCORE_EXCHANGE_RATE, STEP_LOG_TAG, TERM_PERIOD, BlockVoteStatus, WAL_LOG_TAG, ROLLBACK_LOG_TAG + ISCORE_EXCHANGE_RATE, STEP_LOG_TAG, TERM_PERIOD, BlockVoteStatus, WAL_LOG_TAG, ROLLBACK_LOG_TAG, + BLOCK_INVOKE_TIMEOUT_S ) from .iconscore.icon_pre_validator import IconPreValidator from .iconscore.icon_score_class_loader import IconScoreClassLoader @@ -75,6 +76,7 @@ from .utils import sha3_256, int_to_bytes, ContextEngine, ContextStorage from .utils import to_camel_case, bytes_to_hex from .utils.bloom import BloomFilter +from .utils.timer import Timer if TYPE_CHECKING: from .iconscore.icon_score_event_log import EventLog @@ -112,6 +114,7 @@ def __init__(self): self._backup_manager: Optional[BackupManager] = None 
self._backup_cleaner: Optional[BackupCleaner] = None self._conf: Optional[Dict[str, Union[str, int]]] = None + self._block_invoke_timeout_s: int = BLOCK_INVOKE_TIMEOUT_S # JSON-RPC handlers self._handlers = { @@ -215,6 +218,8 @@ def open(self, conf: 'IconConfig'): context, Address.from_string(conf[ConfigKey.BUILTIN_SCORE_OWNER])) self._init_global_value_by_governance_score(context) + self._set_block_invoke_timeout(conf) + # DO NOT change the values in conf self._conf = conf @@ -470,7 +475,6 @@ def invoke(self, :param is_block_editable: boolean which imply whether creating base transaction or not :return: (TransactionResult[], bytes, added transaction{}, main prep as dict{}) """ - # If the block has already been processed, # return the result from PrecommitDataManager precommit_data: 'PrecommitData' = self._precommit_data_manager.get(block.hash) @@ -524,7 +528,20 @@ def invoke(self, context.block_batch.update(context.tx_batch) context.tx_batch.clear() else: + tx_timer = Timer() + tx_timer.start() + for index, tx_request in enumerate(tx_requests): + # Adjust the number of transactions in a block to make sure that + # a leader can broadcast a block candidate to validators in a specific period. + if is_block_editable: + tx_timer.stop() + if tx_timer.duration >= self._block_invoke_timeout_s: + Logger.info( + tag=self.TAG, + msg=f"Stop to invoke remaining transactions: {index} / {len(tx_requests)}") + break + if index == BASE_TRANSACTION_INDEX and context.is_decentralized(): if not tx_request['params'].get('dataType') == "base": raise InvalidBaseTransactionException( @@ -2388,3 +2405,13 @@ def _finish_to_recover_rollback(self): end_block_height=metadata.last_block.height - 1) Logger.debug(tag=self.TAG, msg="_finish_to_recover_rollback() end") + + def _set_block_invoke_timeout(self, conf: Dict[str, Union[str, int]]): + try: + timeout_s: int = conf[ConfigKey.BLOCK_INVOKE_TIMEOUT] + if timeout_s > 0: + self._block_invoke_timeout_s = timeout_s + except: + pass + + Logger.info(tag=self.TAG, msg=f"{ConfigKey.BLOCK_INVOKE_TIMEOUT}: {self._block_invoke_timeout_s}") diff --git a/iconservice/utils/timer.py b/iconservice/utils/timer.py new file mode 100644 index 000000000..3712d9b84 --- /dev/null +++ b/iconservice/utils/timer.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 ICON Foundation Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time + + +class Timer(object): + def __init__(self): + self._start_time_s: float = 0 + self._duration_s: float = 0 + self._end_time_s: float = 0 + + @property + def duration(self) -> float: + return self._duration_s + + def start(self) -> float: + time_s: float = time.time() + self._start_time_s = time_s + return time_s + + def stop(self) -> float: + time_s: float = time.time() + self._end_time_s = time_s + self._duration_s = time_s - self._start_time_s + return time_s From 454d69f9dfadf0b48daeee05846243b26abba479 Mon Sep 17 00:00:00 2001 From: leeheonseung Date: Mon, 2 Mar 2020 16:09:40 +0900 Subject: [PATCH 03/15] IS-1021: DictDB iteration causes infinite loop officially we do not support iteration on DictDB so add raise Exception code references https://www.python.org/dev/peps/pep-0234/ https://docs.python.org/3/reference/datamodel.html#object.__getitem__ --- iconservice/iconscore/icon_container_db.py | 3 ++ tests/icon_score/test_icon_container_db.py | 30 +++++++++++++ .../sample_scores/sample_dict_db/package.json | 5 +++ .../sample_dict_db/sample_dict_db.py | 33 ++++++++++++++ ...y => test_integrate_container_db_patch.py} | 43 +++++++++++++++++++ 5 files changed, 114 insertions(+) create mode 100644 tests/integrate_test/samples/sample_scores/sample_dict_db/package.json create mode 100644 tests/integrate_test/samples/sample_scores/sample_dict_db/sample_dict_db.py rename tests/integrate_test/{test_integrate_array_db_patch.py => test_integrate_container_db_patch.py} (72%) diff --git a/iconservice/iconscore/icon_container_db.py b/iconservice/iconscore/icon_container_db.py index 4b2be31c1..36e6d5d23 100644 --- a/iconservice/iconscore/icon_container_db.py +++ b/iconservice/iconscore/icon_container_db.py @@ -216,6 +216,9 @@ def __remove(self, key: K) -> None: raise InvalidContainerAccessException('DictDB depth mismatch') self._db.delete(get_encoded_key(key)) + def __iter__(self): + raise InvalidContainerAccessException("Not Supported on DictDB") + class ArrayDB(object): """ diff --git a/tests/icon_score/test_icon_container_db.py b/tests/icon_score/test_icon_container_db.py index 7ddbdbb6f..3ade384f8 100644 --- a/tests/icon_score/test_icon_container_db.py +++ b/tests/icon_score/test_icon_container_db.py @@ -339,3 +339,33 @@ def test_container_util(self): with self.assertRaises(InvalidParamsException): prefix: bytes = ContainerUtil.create_db_prefix(VarDB, 'vardb') + +class TestOnlyGetItemObj: + def __init__(self, limit): + self._limit = limit + + def __getitem__(self, key): + if key > self._limit: + raise IndexError(key) + + +class TestIterObj(TestOnlyGetItemObj): + def __iter__(self): + pass + + +class TestContainerDB(unittest.TestCase): + def test_getitem(self): + limit = 100 + datas = TestOnlyGetItemObj(limit) + index = 0 + for index, e in enumerate(datas): + pass + self.assertEqual(index, limit) + + def test_iter(self): + limit = 100 + datas = TestIterObj(limit) + with self.assertRaises(TypeError) as e: + for e in datas: + pass diff --git a/tests/integrate_test/samples/sample_scores/sample_dict_db/package.json b/tests/integrate_test/samples/sample_scores/sample_dict_db/package.json new file mode 100644 index 000000000..6cc62a78a --- /dev/null +++ b/tests/integrate_test/samples/sample_scores/sample_dict_db/package.json @@ -0,0 +1,5 @@ +{ + "version": "0.0.1", + "main_file": "sample_dict_db", + "main_score": "IterableDictDB" +} \ No newline at end of file diff --git a/tests/integrate_test/samples/sample_scores/sample_dict_db/sample_dict_db.py 
b/tests/integrate_test/samples/sample_scores/sample_dict_db/sample_dict_db.py new file mode 100644 index 000000000..9bfd3b355 --- /dev/null +++ b/tests/integrate_test/samples/sample_scores/sample_dict_db/sample_dict_db.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- + +from iconservice import * +TAG = 'IterableDictDB' + + +class IterableDictDB(IconScoreBase): + """ IterableDictDB SCORE Base implementation """ + + # ================================================ + # Initialization + # ================================================ + def __init__(self, db: IconScoreDatabase) -> None: + super().__init__(db) + self._dict = DictDB('DICT', db, value_type=int) + + def on_install(self) -> None: + super().on_install() + + def on_update(self) -> None: + super().on_update() + + @external + def create_item(self, key: str, value: int) -> None: + self._dict[key] = value + + @external(readonly=True) + def get_items(self) -> list: + items = [] + for item in self._dict: + print("11") + items.append(item) + return items diff --git a/tests/integrate_test/test_integrate_array_db_patch.py b/tests/integrate_test/test_integrate_container_db_patch.py similarity index 72% rename from tests/integrate_test/test_integrate_array_db_patch.py rename to tests/integrate_test/test_integrate_container_db_patch.py index a6ddf2529..c958138e9 100644 --- a/tests/integrate_test/test_integrate_array_db_patch.py +++ b/tests/integrate_test/test_integrate_container_db_patch.py @@ -21,6 +21,7 @@ from iconservice.base.address import GOVERNANCE_SCORE_ADDRESS from iconservice.icon_constant import Revision +from iconservice.base.exception import InvalidContainerAccessException from tests.integrate_test.test_integrate_base import TestIntegrateBase if TYPE_CHECKING: @@ -112,3 +113,45 @@ def test_array_db_patch(self): ) self.assertEqual(len(response), 2) + + +class TestIntegrateDictDBPatch(TestIntegrateBase): + def test_dict_db_defective(self): + self.update_governance("0_0_4") + + expected_status = { + "code": Revision.TWO.value, + "name": "1.1.0" + } + + query_request = { + "version": self._version, + "from": self._accounts[0], + "to": GOVERNANCE_SCORE_ADDRESS, + "dataType": "call", + "data": { + "method": "getRevision", + "params": {} + } + } + response = self._query(query_request) + self.assertEqual(expected_status, response) + + tx_results: List['TransactionResult'] = self.deploy_score("sample_scores", + "sample_dict_db", + self._accounts[0]) + score_address: 'Address' = tx_results[0].score_address + + self.score_call(self._accounts[0], score_address, "create_item", params={"key": "a", "value": hex(1)}) + + with self.assertRaises(InvalidContainerAccessException) as e: + self._query( + { + 'to': score_address, + 'dataType': 'call', + 'data': { + 'method': 'get_items' + } + } + ) + self.assertEqual(e.exception.message, "Not Supported on DictDB") From db7ac400b4eacb19b8592e3ac099573859e1ebbb Mon Sep 17 00:00:00 2001 From: Chiwon Cho Date: Mon, 2 Mar 2020 16:26:11 +0900 Subject: [PATCH 04/15] Optimize Timer class * Remove redundant codes * Fix a minor unittest failure --- iconservice/icon_service_engine.py | 1 - iconservice/utils/timer.py | 16 +++------------- tests/test_icon_score_deployer.py | 3 ++- 3 files changed, 5 insertions(+), 15 deletions(-) diff --git a/iconservice/icon_service_engine.py b/iconservice/icon_service_engine.py index 192999c95..33517f165 100644 --- a/iconservice/icon_service_engine.py +++ b/iconservice/icon_service_engine.py @@ -535,7 +535,6 @@ def invoke(self, # Adjust the number of transactions in a block to make 
sure that # a leader can broadcast a block candidate to validators in a specific period. if is_block_editable: - tx_timer.stop() if tx_timer.duration >= self._block_invoke_timeout_s: Logger.info( tag=self.TAG, diff --git a/iconservice/utils/timer.py b/iconservice/utils/timer.py index 3712d9b84..5e3d04f32 100644 --- a/iconservice/utils/timer.py +++ b/iconservice/utils/timer.py @@ -19,20 +19,10 @@ class Timer(object): def __init__(self): self._start_time_s: float = 0 - self._duration_s: float = 0 - self._end_time_s: float = 0 @property def duration(self) -> float: - return self._duration_s + return time.time() - self._start_time_s - def start(self) -> float: - time_s: float = time.time() - self._start_time_s = time_s - return time_s - - def stop(self) -> float: - time_s: float = time.time() - self._end_time_s = time_s - self._duration_s = time_s - self._start_time_s - return time_s + def start(self): + self._start_time_s = time.time() diff --git a/tests/test_icon_score_deployer.py b/tests/test_icon_score_deployer.py index fb7063a96..95bac91c1 100644 --- a/tests/test_icon_score_deployer.py +++ b/tests/test_icon_score_deployer.py @@ -96,7 +96,8 @@ def test_install(self): score_deploy_path: str = get_score_deploy_path('/', self.address, tx_hash1) with self.assertRaises(BaseException) as e: IconScoreDeployer.deploy(score_deploy_path, self.read_zipfile_as_byte(self.normal_score_path)) - self.assertIsInstance(e.exception, PermissionError) + # On MacOS OSError is raised which is different from Linux + # self.assertIsInstance(e.exception, PermissionError) # Case when the user try to install scores inner directories. tx_hash3 = create_tx_hash() From 16f850f6e2fd5837f0c8763eeac13e605225f63d Mon Sep 17 00:00:00 2001 From: leeheonseung Date: Thu, 5 Mar 2020 09:50:04 +0900 Subject: [PATCH 05/15] apply feedback add comment, append detail log message --- iconservice/iconscore/icon_container_db.py | 2 +- tests/icon_score/test_icon_container_db.py | 26 ++++++++++++++++--- .../sample_dict_db/sample_dict_db.py | 8 +++++- 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/iconservice/iconscore/icon_container_db.py b/iconservice/iconscore/icon_container_db.py index 36e6d5d23..f226cd0c7 100644 --- a/iconservice/iconscore/icon_container_db.py +++ b/iconservice/iconscore/icon_container_db.py @@ -217,7 +217,7 @@ def __remove(self, key: K) -> None: self._db.delete(get_encoded_key(key)) def __iter__(self): - raise InvalidContainerAccessException("Not Supported on DictDB") + raise InvalidContainerAccessException("Not Supported iter function on DictDB") class ArrayDB(object): diff --git a/tests/icon_score/test_icon_container_db.py b/tests/icon_score/test_icon_container_db.py index 3ade384f8..c42afbe1a 100644 --- a/tests/icon_score/test_icon_container_db.py +++ b/tests/icon_score/test_icon_container_db.py @@ -340,7 +340,20 @@ def test_container_util(self): prefix: bytes = ContainerUtil.create_db_prefix(VarDB, 'vardb') +""" +Dict DB infinity infinite case +implement __getitem__ function means can be iter +https://www.python.org/dev/peps/pep-0234 +https://docs.python.org/3/reference/datamodel.html#object.__getitem__ +""" + + class TestOnlyGetItemObj: + """ + Override getitem function + If you don't consider this case, this logic will always have an infinite loop. 
+ https://stackoverflow.com/questions/926574/why-does-defining-getitem-on-a-class-make-it-iterable-in-python + """ def __init__(self, limit): self._limit = limit @@ -350,13 +363,19 @@ def __getitem__(self, key): class TestIterObj(TestOnlyGetItemObj): + """ + Override the iter function. + This logic prevents the above infinite loop situation. + https://stackoverflow.com/questions/926574/why-does-defining-getitem-on-a-class-make-it-iterable-in-python + """ def __iter__(self): pass -class TestContainerDB(unittest.TestCase): +class TestIsAvailableToIterator(unittest.TestCase): def test_getitem(self): - limit = 100 + # it is possible to use foreach by using getitem + limit = 100_000 datas = TestOnlyGetItemObj(limit) index = 0 for index, e in enumerate(datas): @@ -364,7 +383,8 @@ def test_getitem(self): self.assertEqual(index, limit) def test_iter(self): - limit = 100 + # prevent infinite loop by using empty __iter function + limit = 100_000 datas = TestIterObj(limit) with self.assertRaises(TypeError) as e: for e in datas: diff --git a/tests/integrate_test/samples/sample_scores/sample_dict_db/sample_dict_db.py b/tests/integrate_test/samples/sample_scores/sample_dict_db/sample_dict_db.py index 9bfd3b355..a304df4c5 100644 --- a/tests/integrate_test/samples/sample_scores/sample_dict_db/sample_dict_db.py +++ b/tests/integrate_test/samples/sample_scores/sample_dict_db/sample_dict_db.py @@ -1,5 +1,12 @@ # -*- coding: utf-8 -*- +""" +Example + +Problem with DictDB : cannot iterate +https://forum.icon.community/t/problem-with-dictdb-cannot-iterate/484 +""" + from iconservice import * TAG = 'IterableDictDB' @@ -28,6 +35,5 @@ def create_item(self, key: str, value: int) -> None: def get_items(self) -> list: items = [] for item in self._dict: - print("11") items.append(item) return items From 927f69ec5a282201c2e7089956147244a9188488 Mon Sep 17 00:00:00 2001 From: Chiwon Cho Date: Wed, 4 Mar 2020 19:52:10 +0900 Subject: [PATCH 06/15] Add sha_256 function for SCORE development * Add a related unittest --- iconservice/__init__.py | 2 +- iconservice/iconscore/icon_score_base2.py | 28 +++++++++++++++++++++-- tests/test_icon_score_api.py | 28 +++++++++++++++++++---- 3 files changed, 51 insertions(+), 7 deletions(-) diff --git a/iconservice/__init__.py b/iconservice/__init__.py index 089f241a3..f476bd526 100644 --- a/iconservice/__init__.py +++ b/iconservice/__init__.py @@ -24,7 +24,7 @@ from .icon_constant import IconServiceFlag from .iconscore.icon_container_db import VarDB, DictDB, ArrayDB from .iconscore.icon_score_base import interface, eventlog, external, payable, IconScoreBase, IconScoreDatabase -from .iconscore.icon_score_base2 import (InterfaceScore, revert, sha3_256, json_loads, json_dumps, +from .iconscore.icon_score_base2 import (InterfaceScore, revert, sha3_256, sha_256, json_loads, json_dumps, get_main_prep_info, get_sub_prep_info, recover_key, create_address_with_key, create_interface_score) from .iconscore.icon_system_score_base import IconSystemScoreBase diff --git a/iconservice/iconscore/icon_score_base2.py b/iconservice/iconscore/icon_score_base2.py index f487ebfb3..0420e4821 100644 --- a/iconservice/iconscore/icon_score_base2.py +++ b/iconservice/iconscore/icon_score_base2.py @@ -103,6 +103,7 @@ def timestamp(self) -> int: class ScoreApiStepRatio(IntEnum): SHA3_256 = 1000 + SHA_256 = 1000 CREATE_ADDRESS_WITH_COMPRESSED_KEY = 15000 CREATE_ADDRESS_WITH_UNCOMPRESSED_KEY = 1500 JSON_DUMPS = 5000 @@ -149,11 +150,33 @@ def revert(message: Optional[str] = None, code: int = 0) -> None: def 
sha3_256(data: bytes) -> bytes: """ - Computes hash using the input data + Computes sha3_256 hash using the input data :param data: input data :return: hashed data in bytes """ + return _hash("sha3_256", data) + + +def sha_256(data: bytes) -> bytes: + """ + Computes sha256 hash using the input data + + :param data: input data + :return: hashed data in bytes + """ + return _hash("sha256", data) + + +def _hash(name: str, data: bytes) -> bytes: + """Protected hash function + + :param name: hash function name: "sha256" or "sha3_256" + :param data: data to hash + :return: hashed data in bytes + """ + if name not in ("sha3_256", "sha256"): + raise InvalidParamsException(f"Not supported: {name}") if not isinstance(data, bytes): raise InvalidParamsException("Invalid dataType") @@ -171,7 +194,8 @@ def sha3_256(data: bytes) -> bytes: context.step_counter.consume_step(StepType.API_CALL, step) - return hashlib.sha3_256(data).digest() + func = getattr(hashlib, name) + return func(data).digest() def json_dumps(obj: Any) -> str: diff --git a/tests/test_icon_score_api.py b/tests/test_icon_score_api.py index c81abedd0..6efcaafa5 100644 --- a/tests/test_icon_score_api.py +++ b/tests/test_icon_score_api.py @@ -27,7 +27,7 @@ from iconservice.iconscore.icon_score_base2 import ScoreApiStepRatio from iconservice.iconscore.icon_score_base2 import _create_address_with_key, _recover_key from iconservice.iconscore.icon_score_base2 import create_address_with_key, recover_key -from iconservice.iconscore.icon_score_base2 import sha3_256, json_dumps, json_loads +from iconservice.iconscore.icon_score_base2 import sha3_256, sha_256, json_dumps, json_loads from iconservice.iconscore.icon_score_context import ContextContainer from iconservice.iconscore.icon_score_context import IconScoreContext, IconScoreContextType, IconScoreContextFactory from iconservice.iconscore.icon_score_step import IconScoreStepCounterFactory, StepType @@ -245,7 +245,7 @@ def test_create_address_with_key_step_with_tx_v3(self): step_used: int = self.context.step_counter.step_used self.assertEqual(compressed_step_cost, step_used) - def test_sha3_256_step(self): + def test_sha3_256(self): step_cost: int = self._calc_step_cost(ScoreApiStepRatio.SHA3_256) for i in range(0, 512): @@ -253,11 +253,31 @@ def test_sha3_256_step(self): if i % 32 > 0: chunks += 1 - sha3_256(b'\x00' * i) + data: bytes = b'\x00' * i + hash_value: bytes = sha3_256(data) + assert hash_value == hashlib.sha3_256(data).digest() expected_step: int = step_cost + step_cost * chunks // 10 step_used: int = self.context.step_counter.step_used - self.assertEqual(expected_step, step_used) + assert step_used == expected_step + + self.context.step_counter.reset(self.step_limit) + + def test_sha_256(self): + step_cost: int = self._calc_step_cost(ScoreApiStepRatio.SHA_256) + + for i in range(0, 512): + chunks = i // 32 + if i % 32 > 0: + chunks += 1 + + data: bytes = b'\x00' * i + hash_value: bytes = sha_256(data) + assert hash_value == hashlib.sha256(data).digest() + + expected_step: int = step_cost + step_cost * chunks // 10 + step_used: int = self.context.step_counter.step_used + assert step_used == expected_step self.context.step_counter.reset(self.step_limit) From d13fbb9f03ccb8f2b58ccd7581329c63ef682ffc Mon Sep 17 00:00:00 2001 From: leeheonseung <37279322+leeheonseung@users.noreply.github.com> Date: Thu, 12 Mar 2020 08:46:06 +0900 Subject: [PATCH 07/15] IS-1029: Array Optimization hotfix (#424) * IS-1029: Array Optimization hotfix * cache prefix_hash_key * Minor update in IconScoreDatabase 
* self.prefix_hash_key -> self._prefix_hash_key * Remove concatenation operator "+" from _make_prefix_hash_key() * Change an error message in DictDB: "Not Supported iter function on DictDB" -> "Iteration not supported in DictDB" Co-authored-by: Chiwon Cho --- iconservice/database/db.py | 34 ++- iconservice/iconscore/icon_container_db.py | 96 +++--- .../test_integrate_container_db_patch.py | 2 +- tests/test_container_db.py | 284 ++++++++++++++++++ 4 files changed, 354 insertions(+), 62 deletions(-) create mode 100644 tests/test_container_db.py diff --git a/iconservice/database/db.py b/iconservice/database/db.py index 3a430eed8..9b231cbdd 100644 --- a/iconservice/database/db.py +++ b/iconservice/database/db.py @@ -16,8 +16,8 @@ from typing import TYPE_CHECKING, Optional, Tuple, Iterable import plyvel - from iconcommons.logger import Logger + from .batch import TransactionBatchValue from ..base.exception import DatabaseException, InvalidParamsException, AccessDeniedException from ..icon_constant import ICON_DB_LOG_TAG @@ -384,6 +384,14 @@ def __init__(self, self._context_db = context_db self._observer: Optional[DatabaseObserver] = None + self._prefix_hash_key: bytes = self._make_prefix_hash_key() + + def _make_prefix_hash_key(self) -> bytes: + data = [self.address.to_bytes()] + if self._prefix is not None: + data.append(self._prefix) + return b'|'.join(data) + def get(self, key: bytes) -> bytes: """ Gets the value for the specified key @@ -428,7 +436,7 @@ def get_sub_db(self, prefix: bytes) -> 'IconScoreSubDatabase': 'prefix is None in IconScoreDatabase.get_sub_db()') if self._prefix is not None: - prefix = b'|'.join([self._prefix, prefix]) + prefix = b'|'.join((self._prefix, prefix)) return IconScoreSubDatabase(self.address, self, prefix) @@ -460,12 +468,8 @@ def _hash_key(self, key: bytes) -> bytes: :params key: key passed by SCORE :return: key bytes """ - data = [self.address.to_bytes()] - if self._prefix is not None: - data.append(self._prefix) - data.append(key) - return b'|'.join(data) + return b'|'.join((self._prefix_hash_key, key)) def _validate_ownership(self): """Prevent a SCORE from accessing the database of another SCORE @@ -490,6 +494,14 @@ def __init__(self, address: 'Address', score_db: 'IconScoreDatabase', prefix: by self._prefix = prefix self._score_db = score_db + self._prefix_hash_key: bytes = self._make_prefix_hash_key() + + def _make_prefix_hash_key(self) -> bytes: + data = [] + if self._prefix is not None: + data.append(self._prefix) + return b'|'.join(data) + def get(self, key: bytes) -> bytes: """ Gets the value for the specified key @@ -521,7 +533,7 @@ def get_sub_db(self, prefix: bytes) -> 'IconScoreSubDatabase': raise InvalidParamsException("Invalid prefix") if self._prefix is not None: - prefix = b'|'.join([self._prefix, prefix]) + prefix = b'|'.join((self._prefix, prefix)) return IconScoreSubDatabase(self.address, self._score_db, prefix) @@ -544,9 +556,5 @@ def _hash_key(self, key: bytes) -> bytes: :params key: key passed by SCORE :return: key bytes """ - data = [] - if self._prefix is not None: - data.append(self._prefix) - data.append(key) - return b'|'.join(data) + return b'|'.join((self._prefix_hash_key, key)) diff --git a/iconservice/iconscore/icon_container_db.py b/iconservice/iconscore/icon_container_db.py index f226cd0c7..1ecca476e 100644 --- a/iconservice/iconscore/icon_container_db.py +++ b/iconservice/iconscore/icon_container_db.py @@ -16,10 +16,10 @@ from typing import TypeVar, Optional, Any, Union, TYPE_CHECKING -from .icon_score_context import 
ContextContainer +from iconservice.icon_constant import IconScoreContextType, Revision +from iconservice.iconscore.icon_score_context import ContextContainer from ..base.address import Address from ..base.exception import InvalidParamsException, InvalidContainerAccessException -from ..icon_constant import Revision, IconScoreContextType from ..utils import int_to_bytes, bytes_to_int if TYPE_CHECKING: @@ -39,27 +39,27 @@ def get_encoded_key(key: V) -> bytes: class ContainerUtil(object): - @staticmethod - def create_db_prefix(cls, var_key: K) -> bytes: + @classmethod + def create_db_prefix(cls, container_cls: type, var_key: K) -> bytes: """Create a prefix used as a parameter of IconScoreDatabase.get_sub_db() - :param cls: ArrayDB, DictDB, VarDB + :param container_cls: ArrayDB, DictDB, VarDB :param var_key: :return: """ - if cls == ArrayDB: + if container_cls == ArrayDB: container_id = ARRAY_DB_ID - elif cls == DictDB: + elif container_cls == DictDB: container_id = DICT_DB_ID else: - raise InvalidParamsException(f'Unsupported container class: {cls}') + raise InvalidParamsException(f'Unsupported container class: {container_cls}') encoded_key: bytes = get_encoded_key(var_key) return b'|'.join([container_id, encoded_key]) - @staticmethod - def encode_key(key: K) -> bytes: + @classmethod + def encode_key(cls, key: K) -> bytes: """Create a key passed to IconScoreDatabase :param key: @@ -80,8 +80,8 @@ def encode_key(key: K) -> bytes: raise InvalidParamsException(f'Unsupported key type: {type(key)}') return bytes_key - @staticmethod - def encode_value(value: V) -> bytes: + @classmethod + def encode_value(cls, value: V) -> bytes: if isinstance(value, int): byte_value = int_to_bytes(value) elif isinstance(value, str): @@ -96,8 +96,8 @@ def encode_value(value: V) -> bytes: raise InvalidParamsException(f'Unsupported value type: {type(value)}') return byte_value - @staticmethod - def decode_object(value: bytes, value_type: type) -> Optional[Union[K, V]]: + @classmethod + def decode_object(cls, value: bytes, value_type: type) -> Optional[Union[K, V]]: if value is None: return get_default_value(value_type) @@ -114,45 +114,45 @@ def decode_object(value: bytes, value_type: type) -> Optional[Union[K, V]]: obj_value = value return obj_value - @staticmethod - def remove_prefix_from_iters(iter_items: iter) -> iter: - return ((ContainerUtil.__remove_prefix_from_key(key), value) for key, value in iter_items) + @classmethod + def remove_prefix_from_iters(cls, iter_items: iter) -> iter: + return ((cls.__remove_prefix_from_key(key), value) for key, value in iter_items) - @staticmethod - def __remove_prefix_from_key(key_from_bytes: bytes) -> bytes: + @classmethod + def __remove_prefix_from_key(cls, key_from_bytes: bytes) -> bytes: return key_from_bytes[:-1] - @staticmethod - def put_to_db(db: 'IconScoreDatabase', db_key: str, container: iter) -> None: - sub_db = db.get_sub_db(ContainerUtil.encode_key(db_key)) + @classmethod + def put_to_db(cls, db: 'IconScoreDatabase', db_key: str, container: iter) -> None: + sub_db = db.get_sub_db(cls.encode_key(db_key)) if isinstance(container, dict): - ContainerUtil.__put_to_db_internal(sub_db, container.items()) + cls.__put_to_db_internal(sub_db, container.items()) elif isinstance(container, (list, set, tuple)): - ContainerUtil.__put_to_db_internal(sub_db, enumerate(container)) + cls.__put_to_db_internal(sub_db, enumerate(container)) - @staticmethod - def get_from_db(db: 'IconScoreDatabase', db_key: str, *args, value_type: type) -> Optional[K]: - sub_db = 
db.get_sub_db(ContainerUtil.encode_key(db_key)) + @classmethod + def get_from_db(cls, db: 'IconScoreDatabase', db_key: str, *args, value_type: type) -> Optional[K]: + sub_db = db.get_sub_db(cls.encode_key(db_key)) *args, last_arg = args for arg in args: - sub_db = sub_db.get_sub_db(ContainerUtil.encode_key(arg)) + sub_db = sub_db.get_sub_db(cls.encode_key(arg)) - byte_key = sub_db.get(ContainerUtil.encode_key(last_arg)) + byte_key = sub_db.get(cls.encode_key(last_arg)) if byte_key is None: return get_default_value(value_type) - return ContainerUtil.decode_object(byte_key, value_type) + return cls.decode_object(byte_key, value_type) - @staticmethod - def __put_to_db_internal(db: Union['IconScoreDatabase', 'IconScoreSubDatabase'], iters: iter) -> None: + @classmethod + def __put_to_db_internal(cls, db: Union['IconScoreDatabase', 'IconScoreSubDatabase'], iters: iter) -> None: for key, value in iters: - sub_db = db.get_sub_db(ContainerUtil.encode_key(key)) + sub_db = db.get_sub_db(cls.encode_key(key)) if isinstance(value, dict): - ContainerUtil.__put_to_db_internal(sub_db, value.items()) + cls.__put_to_db_internal(sub_db, value.items()) elif isinstance(value, (list, set, tuple)): - ContainerUtil.__put_to_db_internal(sub_db, enumerate(value)) + cls.__put_to_db_internal(sub_db, enumerate(value)) else: - db_key = ContainerUtil.encode_key(key) - db_value = ContainerUtil.encode_value(value) + db_key = cls.encode_key(key) + db_value = cls.encode_value(value) db.put(db_key, db_value) @@ -217,7 +217,7 @@ def __remove(self, key: K) -> None: self._db.delete(get_encoded_key(key)) def __iter__(self): - raise InvalidContainerAccessException("Not Supported iter function on DictDB") + raise InvalidContainerAccessException("Iteration not supported in DictDB") class ArrayDB(object): @@ -279,12 +279,12 @@ def __get_size(self) -> int: return self.__get_size_from_db() def __get_size_from_db(self) -> int: - return ContainerUtil.decode_object(self._db.get(ArrayDB.__SIZE_BYTE_KEY), int) + return ContainerUtil.decode_object(self._db.get(self.__SIZE_BYTE_KEY), int) def __set_size(self, size: int) -> None: self.__legacy_size = size byte_value = ContainerUtil.encode_value(size) - self._db.put(ArrayDB.__SIZE_BYTE_KEY, byte_value) + self._db.put(self.__SIZE_BYTE_KEY, byte_value) def __put(self, index: int, value: V) -> None: byte_value = ContainerUtil.encode_value(value) @@ -312,7 +312,7 @@ def __setitem__(self, index: int, value: V) -> None: raise InvalidParamsException('ArrayDB out of index') def __getitem__(self, index: int) -> V: - return ArrayDB._get(self._db, self.__get_size(), index, self.__value_type) + return self._get(self._db, self.__get_size(), index, self.__value_type) def __contains__(self, item: V): for e in self: @@ -320,14 +320,14 @@ def __contains__(self, item: V): return True return False - @staticmethod - def __is_defective_revision(): + @classmethod + def __is_defective_revision(cls): context = ContextContainer._get_context() revision = context.revision return context.type == IconScoreContextType.INVOKE and revision < Revision.THREE.value - @staticmethod - def _get(db: Union['IconScoreDatabase', 'IconScoreSubDatabase'], size: int, index: int, value_type: type) -> V: + @classmethod + def _get(cls, db: Union['IconScoreDatabase', 'IconScoreSubDatabase'], size: int, index: int, value_type: type) -> V: if not isinstance(index, int): raise InvalidParamsException('Invalid index type: not an integer') @@ -341,10 +341,10 @@ def _get(db: Union['IconScoreDatabase', 'IconScoreSubDatabase'], size: int, inde raise 
InvalidParamsException('ArrayDB out of index') - @staticmethod - def _get_generator(db: Union['IconScoreDatabase', 'IconScoreSubDatabase'], size: int, value_type: type): + @classmethod + def _get_generator(cls, db: Union['IconScoreDatabase', 'IconScoreSubDatabase'], size: int, value_type: type): for index in range(size): - yield ArrayDB._get(db, size, index, value_type) + yield cls._get(db, size, index, value_type) class VarDB(object): diff --git a/tests/integrate_test/test_integrate_container_db_patch.py b/tests/integrate_test/test_integrate_container_db_patch.py index c958138e9..501405e0c 100644 --- a/tests/integrate_test/test_integrate_container_db_patch.py +++ b/tests/integrate_test/test_integrate_container_db_patch.py @@ -154,4 +154,4 @@ def test_dict_db_defective(self): } } ) - self.assertEqual(e.exception.message, "Not Supported on DictDB") + self.assertEqual(e.exception.message, "Iteration not supported in DictDB") diff --git a/tests/test_container_db.py b/tests/test_container_db.py new file mode 100644 index 000000000..7bcb47d6b --- /dev/null +++ b/tests/test_container_db.py @@ -0,0 +1,284 @@ +import unittest + +import time +import plyvel + +from iconservice import VarDB, ArrayDB, DictDB, Address +from iconservice.database.db import KeyValueDatabase, ContextDatabase, IconScoreDatabase +from iconservice.icon_constant import IconScoreContextType +from iconservice.iconscore.icon_container_db import ContainerUtil +from iconservice.iconscore.icon_score_context import ContextContainer +from iconservice.iconscore.icon_score_context import IconScoreContext +from tests import create_address, rmtree +from tests.mock_db import MockKeyValueDatabase + +DB_PATH: str = ".mycom22_db" +VAR_DB: str = "test_var" +ARRAY_DB: str = "test_array" +DICT_DB1: str = "test_dict1" +DICT_DB2: str = "test_dict2" +SCORE_ADDR: 'Address' = create_address(1, b'0') + +REVISION: int = 10 +INDEX: int = 7 + +DISABLE = True + +# RANGE_LIST = [10, 50, 100, 500, 1000, 5000, 10000, 50000, 100000, 500000, 1000000, 5000000] +RANGE_LIST = [5000000] + +SCORE_ADDR_BYTES = SCORE_ADDR.to_bytes() + + +@unittest.skipIf(condition=DISABLE, reason="DISABLE") +class TestPlyvelDB(unittest.TestCase): + """ + Native PlyvelDB performance check + """ + + def _hash_key_bypass(self, key: bytes) -> bytes: + return key + + def _hash_key_origin(self, key: bytes) -> bytes: + data = [SCORE_ADDR.to_bytes()] + data.append(b'0x10') + data.append(key) + return b'|'.join(data) + + def _hash_key_cache_bytes(self, key: bytes) -> bytes: + data = [SCORE_ADDR_BYTES] + data.append(b'0x10') + data.append(key) + return b'|'.join(data) + + def _hash_key_cache_bytes_and_remove_append(self, key: bytes) -> bytes: + data = [SCORE_ADDR_BYTES, b'0x10', key] + return b'|'.join(data) + + def _put(self, range_cnt: int, hash_func: callable): + db = plyvel.DB(f"{DB_PATH}_{range_cnt}", create_if_missing=True) + + for i in range(range_cnt): + key = f"{i}".encode() + hashed_key = hash_func(key) + db.put(hashed_key, SCORE_ADDR_BYTES) + + def _get(self, range_cnt: int, hash_func: callable): + db = plyvel.DB(f"{DB_PATH}_{range_cnt}", create_if_missing=True) + + start = time.time() + + for i in range(range_cnt): + key = f"{i}".encode() + hashed_key = hash_func(key) + db.get(hashed_key) + + print(f"_get[{hash_func.__name__} {range_cnt} :", time.time() - start) + + def test_put(self): + for i in RANGE_LIST: + rmtree(f"{DB_PATH}_{i}") + + for i in RANGE_LIST: + self._put(i, self._hash_key_bypass) + + def test_get(self): + for i in RANGE_LIST: + self._get(i, self._hash_key_bypass) + 
+ +@unittest.skipIf(condition=DISABLE, reason="DISABLE") +class TestPrebuildForContainerDB(unittest.TestCase): + """ + Prebuild DB for ContainerDB get + """ + + def _create_plyvel_db(self, range_cnt: int): + _db = KeyValueDatabase.from_path(f"{DB_PATH}{range_cnt}") + context_db = ContextDatabase(_db) + return IconScoreDatabase(SCORE_ADDR, context_db) + + def _create_new_db(self, range_cnt: int): + self.db = self._create_plyvel_db(range_cnt) + self._context = IconScoreContext(IconScoreContextType.DIRECT) + self._context.current_address = self.db.address + self._context.revision = REVISION + ContextContainer._push_context(self._context) + + ## LOGIC + + var_db = VarDB(VAR_DB, self.db, value_type=int) + array_db = ArrayDB(ARRAY_DB, self.db, value_type=Address) + dict_db1 = DictDB(DICT_DB1, self.db, value_type=Address) + dict_db2 = DictDB(DICT_DB2, self.db, value_type=int) + + index: int = 0 + for index in range(range_cnt): + addr: 'Address' = create_address() + array_db.put(addr) + dict_db1[index] = addr + dict_db2[addr] = index + var_db.set(index) + + ContextContainer._pop_context() + + def test_create_db(self): + for i in RANGE_LIST: + rmtree(f"{DB_PATH}{i}") + + for i in RANGE_LIST: + self._create_new_db(i) + + +def _create_plyvel_db(range_cnt: int): + _db = KeyValueDatabase.from_path(f"{DB_PATH}{range_cnt}") + context_db = ContextDatabase(_db) + return IconScoreDatabase(SCORE_ADDR, context_db) + + +def _create_mock_db(range_cnt: int): + mock_db = MockKeyValueDatabase.create_db() + context_db = ContextDatabase(mock_db) + return IconScoreDatabase(SCORE_ADDR, context_db) + + +# for profile +def _for_profile_function(range_cnt: int, _create_db_func: callable): + db = _create_db_func(range_cnt) + _context = IconScoreContext(IconScoreContextType.DIRECT) + _context.current_address = db.address + _context.revision = REVISION + ContextContainer._push_context(_context) + + array_db = ArrayDB(ARRAY_DB, db, value_type=Address) + + for index in range(range_cnt): + addr: 'Address' = create_address() + array_db.put(addr) + + for i in range(range_cnt): + a = array_db[i] + + ContextContainer._clear_context() + + +@unittest.skipIf(condition=DISABLE, reason="DISABLE") +class TestIconContainerDB(unittest.TestCase): + def _setup(self, range_cnt: int, _create_db_func: callable): + self.db = _create_db_func(range_cnt) + self._context = IconScoreContext(IconScoreContextType.DIRECT) + self._context.current_address = self.db.address + self._context.revision = REVISION + ContextContainer._push_context(self._context) + + def _tear_down(self): + ContextContainer._clear_context() + self.db = None + # rmtree(f"{DB_PATH}{range_cnt}") + + def _var_db_perfomance(self, + range_cnt: int, + _create_db_func: callable): + self._setup(range_cnt, _create_db_func) + + var_db = VarDB(VAR_DB, self.db, value_type=Address) + var_db.set(0) + + start = time.time() + + # LOGIC + for i in range(range_cnt): + a = var_db.get() + + print(f"_var_db_perfomance [{_create_db_func.__name__} {range_cnt} :", time.time() - start) + + self._tear_down() + + def _array_db_perfomance(self, + range_cnt: int, + _create_db_func: callable): + self._setup(range_cnt, _create_db_func) + + array_db = ArrayDB(ARRAY_DB, self.db, value_type=Address) + for index in range(range_cnt): + addr: 'Address' = create_address() + array_db.put(addr) + + start = time.time() + + # LOGIC + for i in range(range_cnt): + a = array_db[i] + + print(f"_array_db_perfomance [{_create_db_func.__name__} {range_cnt} :", time.time() - start) + + self._tear_down() + + def 
_dict_db_perfomance(self, + range_cnt: int, + _create_db_func: callable): + self._setup(range_cnt, _create_db_func) + + dict_db = DictDB(DICT_DB1, self.db, value_type=Address) + for index in range(range_cnt): + addr: 'Address' = create_address() + dict_db[index] = addr + start = time.time() + + # LOGIC + for i in range(range_cnt): + a = dict_db[i] + + print(f"_dict_db_perfomance [{_create_db_func.__name__} {range_cnt} :", time.time() - start) + + self._tear_down() + + def _complex_db_perfomance(self, + range_cnt: int, + _create_db_func: callable): + self._setup(range_cnt, _create_db_func) + + array_db = ArrayDB(ARRAY_DB, self.db, value_type=Address) + dict_db = DictDB(DICT_DB2, self.db, value_type=Address) + + for index in range(range_cnt): + addr: 'Address' = create_address() + array_db.put(addr) + dict_db[addr] = index + + start = time.time() + + # LOGIC + for i in range(range_cnt): + a = dict_db[array_db[0]] + + print(f"_complex_db_perfomance [{_create_db_func.__name__} {range_cnt} :", time.time() - start) + + self._tear_down() + + def test_var_db_performance(self): + for count in RANGE_LIST: + self._var_db_perfomance(count, _create_mock_db) + + def test_array_db_performance(self): + for count in RANGE_LIST: + self._array_db_perfomance(count, _create_mock_db) + + def test_dict_db_performance(self): + for count in RANGE_LIST: + self._dict_db_perfomance(count, _create_mock_db) + + def test_complex_db_performance(self): + for count in RANGE_LIST: + self._complex_db_perfomance(count, _create_mock_db) + + def test_profile(self): + from cProfile import Profile + from pstats import Stats + + # LOGIC + p = Profile() + p.runcall(_for_profile_function, 100_000, _create_mock_db) + + stats = Stats(p) + stats.print_stats() From 6827daa98478b8231181cc0a0b7717e434aeaca4 Mon Sep 17 00:00:00 2001 From: Chiwon Cho Date: Mon, 16 Mar 2020 11:01:43 +0900 Subject: [PATCH 08/15] VERSION: 1.6.1rc1 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 2eda823ff..85c407614 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.6.1 \ No newline at end of file +1.6.1rc1 From 2713c66b442eda78bf48993cc3f5ce220d92134d Mon Sep 17 00:00:00 2001 From: leeheonseung <37279322+leeheonseung@users.noreply.github.com> Date: Fri, 20 Mar 2020 11:40:05 +0900 Subject: [PATCH 09/15] Replace time.time() with time.monotonic() in Timer class (#427) Co-authored-by: Chiwon Cho --- iconservice/utils/timer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/iconservice/utils/timer.py b/iconservice/utils/timer.py index 5e3d04f32..86e99026e 100644 --- a/iconservice/utils/timer.py +++ b/iconservice/utils/timer.py @@ -22,7 +22,7 @@ def __init__(self): @property def duration(self) -> float: - return time.time() - self._start_time_s + return time.monotonic() - self._start_time_s def start(self): - self._start_time_s = time.time() + self._start_time_s = time.monotonic() From f93ddebcfbb4501e61bfae539f9e58e0103a8e61 Mon Sep 17 00:00:00 2001 From: leeheonseung <37279322+leeheonseung@users.noreply.github.com> Date: Fri, 20 Mar 2020 11:40:24 +0900 Subject: [PATCH 10/15] Skip EOA to EOA icx transfer in execution time check (#428) Co-authored-by: Chiwon Cho --- iconservice/icon_service_engine.py | 34 ++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/iconservice/icon_service_engine.py b/iconservice/icon_service_engine.py index 33517f165..f694555f3 100644 --- a/iconservice/icon_service_engine.py +++ b/iconservice/icon_service_engine.py @@ -534,12 
+534,11 @@ def invoke(self, for index, tx_request in enumerate(tx_requests): # Adjust the number of transactions in a block to make sure that # a leader can broadcast a block candidate to validators in a specific period. - if is_block_editable: - if tx_timer.duration >= self._block_invoke_timeout_s: - Logger.info( - tag=self.TAG, - msg=f"Stop to invoke remaining transactions: {index} / {len(tx_requests)}") - break + if is_block_editable and not self._continue_to_invoke(tx_request, tx_timer): + Logger.info( + tag=self.TAG, + msg=f"Stop to invoke remaining transactions: {index} / {len(tx_requests)}") + break if index == BASE_TRANSACTION_INDEX and context.is_decentralized(): if not tx_request['params'].get('dataType') == "base": @@ -2414,3 +2413,26 @@ def _set_block_invoke_timeout(self, conf: Dict[str, Union[str, int]]): pass Logger.info(tag=self.TAG, msg=f"{ConfigKey.BLOCK_INVOKE_TIMEOUT}: {self._block_invoke_timeout_s}") + + def _continue_to_invoke(self, tx_request: Dict, tx_timer: 'Timer') -> bool: + """If this is a block created by a leader, + check to continue transaction invoking with block_invoke_timeout + + :param tx_request: + :param tx_timer: + :return: + """ + to: Optional['Address'] = tx_request["params"].get("to") + + # Skip EOA to EOA coin transfer in execution time check + if to and to.is_contract: + if tx_timer.duration >= self._block_invoke_timeout_s: + Logger.info( + tag=self.TAG, + msg=f"Stop transaction invoking: " + f"duration={tx_timer.duration} " + f"block_invoke_timeout={self._block_invoke_timeout_s}" + ) + return False + + return True From 2767eec79816d31e08b04c37d806e330ff1d0668 Mon Sep 17 00:00:00 2001 From: leeheonseung Date: Fri, 20 Mar 2020 11:39:29 +0900 Subject: [PATCH 11/15] apply static version about msgpack, iso3166 for migration temporarily --- requirements.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 2e1ef8aa8..a4118fb1d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,5 +3,6 @@ plyvel>=1.0.5 secp256k1==0.13.2 earlgrey>=0.0.4 iconcommons>=1.0.5 -msgpack -iso3166 +msgpack==0.6.2 +iso3166==1.0.1 + From 84e75f0818a60f5e6ec06e8d4c5941b50a90caa7 Mon Sep 17 00:00:00 2001 From: leeheonseung Date: Fri, 20 Mar 2020 11:41:25 +0900 Subject: [PATCH 12/15] VERSION: 1.6.1rc2 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 85c407614..d023fc216 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.6.1rc1 +1.6.1rc2 From 9774fa21f973cfecf9fd6747c2714d1689426d73 Mon Sep 17 00:00:00 2001 From: eunsoopark Date: Tue, 24 Mar 2020 10:41:46 +0900 Subject: [PATCH 13/15] IS-1042 fix Fee 2.0 bug - Can't withdraw deposit if penalty is charged --- iconservice/icx/engine.py | 8 ----- iconservice/icx/storage.py | 8 +++++ .../test_integrate_fee_sharing.py | 36 +++++++++++++++++++ 3 files changed, 44 insertions(+), 8 deletions(-) diff --git a/iconservice/icx/engine.py b/iconservice/icx/engine.py index 1176f0bb0..3f8140787 100644 --- a/iconservice/icx/engine.py +++ b/iconservice/icx/engine.py @@ -97,11 +97,3 @@ def _transfer(self, context.storage.icx.put_account(context, to_account) return True - - def get_treasury_account(self, context: 'IconScoreContext') -> 'Account': - """Returns the instance of treasury account - - :param context: - :return: Account - """ - return context.storage.icx.get_account(context, context.storage.icx.fee_treasury) diff --git a/iconservice/icx/storage.py b/iconservice/icx/storage.py index c3962753c..72f9b1a35 100644 --- 
a/iconservice/icx/storage.py +++ b/iconservice/icx/storage.py @@ -283,6 +283,14 @@ def get_account(self, stake_part=stake_part, delegation_part=delegation_part) + def get_treasury_account(self, context: 'IconScoreContext') -> 'Account': + """Returns the instance of treasury account + + :param context: + :return: Account + """ + return context.storage.icx.get_account(context, context.storage.icx.fee_treasury) + def _get_part(self, context: 'IconScoreContext', part_class: Union[type(CoinPart), type(StakePart), type(DelegationPart)], address: 'Address') -> Union['CoinPart', 'StakePart', 'DelegationPart']: diff --git a/tests/integrate_test/test_integrate_fee_sharing.py b/tests/integrate_test/test_integrate_fee_sharing.py index 4ba659323..76f7cdccd 100644 --- a/tests/integrate_test/test_integrate_fee_sharing.py +++ b/tests/integrate_test/test_integrate_fee_sharing.py @@ -447,6 +447,42 @@ def test_withdraw_deposit_after_deposit(self): tx_results: List['TransactionResult'] = self.withdraw_deposit(deposit_id=deposit_id, score_address=self.score_address) self.assertTrue(tx_results[0].status) + event_log = tx_results[0].event_logs[0] + self.assertEqual('DepositWithdrawn(bytes,Address,int,int)', event_log.indexed[0]) + self.assertEqual(event_log.data[0], MIN_DEPOSIT_AMOUNT) # withdraw amount + self.assertEqual(event_log.data[1], 0) # penalty amount + + score_info: dict = self._query_score_info(self.score_address) + self.assertNotIn('depositInfo', score_info) + + def test_withdraw_deposit_with_penalty(self): + """ + Given : The SCORE is deployed, deposit once and . + When : Withdraws the deposit. + Then : Amount of availableDeposit is 0. + """ + tx_results: List['TransactionResult'] = self.deposit_icx(score_address=self.score_address, + amount=MIN_DEPOSIT_AMOUNT, + period=MIN_DEPOSIT_TERM) + deposit_id: bytes = tx_results[0].tx_hash + + score_info: dict = self._query_score_info(self.score_address) + self.assertIn('depositInfo', score_info) + self.assertIn(deposit_id, map(lambda d: d['id'], score_info['depositInfo']['deposits'])) + + # invoke score method to use virtual step + self.score_call(from_=self._admin, + to_=self.score_address, + func_name="set_value", + params={"value": hex(100), "proportion": hex(100)}) + + tx_results: List['TransactionResult'] = self.withdraw_deposit(deposit_id=deposit_id, + score_address=self.score_address) + self.assertTrue(tx_results[0].status) + event_log = tx_results[0].event_logs[0] + self.assertEqual('DepositWithdrawn(bytes,Address,int,int)', event_log.indexed[0]) + self.assertTrue(event_log.data[0] < MIN_DEPOSIT_AMOUNT) # withdraw amount + self.assertTrue(event_log.data[1] > 0) # penalty amount score_info: dict = self._query_score_info(self.score_address) self.assertNotIn('depositInfo', score_info) From 1a320c574715ab035b2b47fdb681506649fdeff4 Mon Sep 17 00:00:00 2001 From: "Seungmin.Ryu" Date: Tue, 24 Mar 2020 11:03:40 +0900 Subject: [PATCH 14/15] VERSION: 1.6.1rc3 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index d023fc216..21f69d19e 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.6.1rc2 +1.6.1rc3 From 56b34608076124dd674299ba67cb0d8f429eafc5 Mon Sep 17 00:00:00 2001 From: Chiwon Cho Date: Wed, 1 Apr 2020 17:23:58 +0900 Subject: [PATCH 15/15] VERSION: 1.6.1 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 21f69d19e..9c6d6293b 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.6.1rc3 +1.6.1
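
Note on the block-invoke-timeout change (patches 02, 04, 09 and 10): the sketch below is a minimal, standalone illustration of the leader-side behavior added in this series, not the engine code itself. The Timer class and the _continue_to_invoke() check mirror the diffs above (time.monotonic()-based duration, default blockInvokeTimeout of 15 seconds, EOA-to-EOA coin transfers exempt from the timeout check); the invoke_block() harness and the shape of tx_request are illustrative assumptions, not part of the patches.

import time
from typing import Dict, List


class Timer:
    """Monotonic timer, as introduced in iconservice/utils/timer.py."""

    def __init__(self):
        self._start_time_s: float = 0

    @property
    def duration(self) -> float:
        return time.monotonic() - self._start_time_s

    def start(self):
        self._start_time_s = time.monotonic()


BLOCK_INVOKE_TIMEOUT_S = 15  # default for ConfigKey.BLOCK_INVOKE_TIMEOUT ("blockInvokeTimeout")


def _continue_to_invoke(tx_request: Dict, tx_timer: Timer, timeout_s: int) -> bool:
    # EOA-to-EOA coin transfers are skipped in the execution-time check (patch 10),
    # so only transactions addressed to a contract can stop block invocation early.
    to = tx_request["params"].get("to")
    if to is not None and getattr(to, "is_contract", False):
        if tx_timer.duration >= timeout_s:
            return False
    return True


def invoke_block(tx_requests: List[Dict], is_block_editable: bool,
                 timeout_s: int = BLOCK_INVOKE_TIMEOUT_S) -> List[Dict]:
    """Hypothetical harness: returns the transactions a leader would actually invoke."""
    tx_timer = Timer()
    tx_timer.start()

    invoked: List[Dict] = []
    for index, tx_request in enumerate(tx_requests):
        # A leader building an editable block stops invoking the remaining transactions
        # once the elapsed time exceeds blockInvokeTimeout; validators replaying a fixed
        # block (is_block_editable=False) always execute every transaction.
        if is_block_editable and not _continue_to_invoke(tx_request, tx_timer, timeout_s):
            break
        invoked.append(tx_request)  # the real engine executes the transaction here
    return invoked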