From 684bdef8c50fd56550fa2688b8281a2556594698 Mon Sep 17 00:00:00 2001
From: Sourcery AI <>
Date: Wed, 13 Dec 2023 08:29:08 +0000
Subject: [PATCH] 'Refactored by Sourcery'

---
 .../examples/charts/error-bars-asymmetric.py  |  14 +-
 .../examples/charts/histogram-cumulative.py   |   2 +-
 .../examples/charts/histogram-horizontal.py   |   2 +-
 doc/gui/examples/charts/histogram-nbins.py    |   2 +-
 .../examples/charts/histogram-normalized.py   |   2 +-
 doc/gui/examples/charts/histogram-overlay.py  |   5 +-
 doc/gui/examples/charts/histogram-simple.py   |   2 +-
 doc/gui/examples/charts/histogram-stacked.py  |   5 +-
 doc/gui/examples/charts/treemap-simple.py     |   8 +-
 taipy/config/__init__.py                      |   7 +-
 taipy/config/_config.py                       |   6 +-
 taipy/config/_serializer/_base_serializer.py  |  23 ++-
 .../checker/_checkers/_config_checker.py      |  21 +--
 taipy/config/common/_config_blocker.py        |   2 +-
 taipy/config/common/_template_handler.py      |   5 +-
 taipy/config/config.py                        |  20 +-
 taipy/core/_backup/_backup.py                 |  47 ++---
 taipy/core/_core_cli.py                       |   4 +-
 taipy/core/_entity/_dag.py                    |  16 +-
 taipy/core/_entity/_entity.py                 |   2 +-
 taipy/core/_entity/_labeled.py                |   8 +-
 taipy/core/_entity/_migrate/_utils.py         |   8 +-
 taipy/core/_entity/_migrate_cli.py            |  10 +-
 taipy/core/_entity/_reload.py                 |  13 +-
 taipy/core/_entity/submittable.py             |  15 +-
 .../_dispatcher/_task_function_wrapper.py     |   3 +-
 taipy/core/_orchestrator/_orchestrator.py     |  58 +++---
 taipy/core/_repository/_base_taipy_model.py   |   3 +-
 taipy/core/_repository/_encoder.py            |   9 +-
 .../_repository/_filesystem_repository.py     |  11 +-
 taipy/core/_repository/_sql_repository.py     |  18 +-
 taipy/core/_repository/db/_sql_connection.py  |   5 +-
 taipy/core/_version/_utils.py                 |   7 +-
 taipy/core/_version/_version_manager.py       |  19 +-
 .../core/_version/_version_manager_factory.py |  10 +-
 taipy/core/_version/_version_mixin.py         |   9 +-
 taipy/core/common/_mongo_connector.py         |   2 +-
 taipy/core/common/_utils.py                   |  19 +-
 .../config/checkers/_config_id_checker.py     |   2 +-
 .../checkers/_data_node_config_checker.py     | 173 +++++++++---------
 .../checkers/_scenario_config_checker.py      |   9 +-
 .../config/checkers/_task_config_checker.py   |  15 +-
 taipy/core/config/data_node_config.py         |  31 ++--
 taipy/core/config/job_config.py               |   3 +-
 taipy/core/config/scenario_config.py          |  17 +-
 taipy/core/cycle/_cycle_manager_factory.py    |  10 +-
 taipy/core/cycle/cycle.py                     |   8 +-
 taipy/core/data/_abstract_sql.py              |   2 +-
 taipy/core/data/_data_converter.py            |  10 +-
 taipy/core/data/_data_manager.py              |   3 +-
 taipy/core/data/_data_manager_factory.py      |  10 +-
 taipy/core/data/_filter.py                    |  59 +++---
 taipy/core/data/csv.py                        |  55 +++---
 taipy/core/data/data_node.py                  |  37 ++--
 taipy/core/data/excel.py                      |  16 +-
 taipy/core/data/generic.py                    |   5 +-
 taipy/core/data/mongo.py                      |   2 +-
 taipy/core/exceptions/exceptions.py           |   2 +-
 taipy/core/job/_job_manager.py                |  13 +-
 taipy/core/job/_job_manager_factory.py        |  10 +-
 taipy/core/notification/_topic.py             |   6 +-
 taipy/core/notification/event.py              |   6 +-
 taipy/core/notification/notifier.py           |   8 +-
 taipy/core/scenario/_scenario_converter.py    |  23 ++-
 taipy/core/scenario/_scenario_manager.py      |  84 ++++-----
 .../scenario/_scenario_manager_factory.py     |  10 +-
 taipy/core/scenario/scenario.py               |  59 +++---
 taipy/core/sequence/_sequence_manager.py      |  32 ++--
 .../sequence/_sequence_manager_factory.py     |  15 +-
 taipy/core/sequence/sequence.py               |  18 +-
 .../core/submission/_submission_converter.py  |   3 +-
 taipy/core/submission/_submission_manager.py  |  18 +-
 .../submission/_submission_manager_factory.py |   9 +-
 taipy/core/submission/submission.py           |  10 +-
 taipy/core/taipy.py                           |   4 +-
 taipy/core/task/_task_manager.py              |   8 +-
 taipy/core/task/_task_manager_factory.py      |  10 +-
 taipy/core/task/_task_model.py                |   8 +-
 taipy/core/task/task.py                       |   7 +-
 taipy/gui/_gui_cli.py                         |   4 +-
 taipy/gui/_renderers/_markdown/preproc.py     |   8 +-
 taipy/gui/_renderers/builder.py               |  35 ++--
 taipy/gui/_renderers/factory.py               |   6 +-
 taipy/gui/builder/_api_generator.py           |  13 +-
 taipy/gui/builder/_element.py                 |  14 +-
 taipy/gui/data/content_accessor.py            |  14 +-
 taipy/gui/data/utils.py                       |  12 +-
 taipy/gui/extension/library.py                |   6 +-
 taipy/gui/gui.py                              |  32 ++--
 taipy/gui/utils/_evaluator.py                 |  39 ++--
 taipy/gui/utils/_variable_directory.py        |   2 +-
 taipy/gui/utils/chart_config_builder.py       |  11 +-
 taipy/gui/utils/html.py                       |   8 +-
 taipy/gui/utils/types.py                      |  28 +--
 taipy/gui_core/_adapters.py                   |  15 +-
 taipy/gui_core/_context.py                    | 162 ++++++++--------
 taipy/rest/api/resources/cycle.py             |   6 +-
 taipy/rest/api/resources/datanode.py          |  12 +-
 taipy/rest/api/resources/job.py               |   6 +-
 taipy/rest/api/resources/scenario.py          |   6 +-
 taipy/rest/api/resources/task.py              |   6 +-
 taipy/rest/commons/apispec.py                 |   4 +-
 taipy/rest/commons/encoder.py                 |   7 +-
 .../default/hooks/post_gen_project.py         |  14 +-
 .../hooks/post_gen_project.py                 |   2 +-
 .../config/config.py                          |   7 +-
 .../tests/test_scenario_mgt_template.py       |   8 +-
 .../global_app/test_global_app_config.py      |   2 +-
 tests/config/test_override_config.py          |   2 +-
 tests/config/test_section_serialization.py    |  10 +-
 tests/core/_manager/test_manager.py           |   5 +-
 tests/core/_orchestrator/test_orchestrator.py |  40 +++-
 .../common/test_warn_if_inputs_not_ready.py   |  16 +-
 tests/core/config/test_config.py              |  11 +-
 .../core/config/test_config_serialization.py  |  52 +++---
 tests/core/config/test_override_config.py     |   2 +-
 tests/core/config/test_scenario_config.py     |   7 +-
 tests/core/conftest.py                        |   4 +-
 tests/core/data/test_data_node.py             |   3 +-
 tests/core/data/test_filter_data_node.py      |  22 +--
 tests/core/data/test_generic_data_node.py     |   6 +-
 tests/core/data/test_json_data_node.py        |   4 +-
 tests/core/data/test_parquet_data_node.py     |   4 +-
 tests/core/job/test_job.py                    |   2 +-
 tests/core/job/test_job_manager.py            |   2 +-
 .../job/test_job_manager_with_sql_repo.py     |   2 +-
 tests/core/notification/test_notifier.py      |  72 +++-----
 tests/core/repository/mocks.py                |   5 +-
 tests/core/scenario/test_scenario.py          |  33 ++--
 tests/core/scenario/test_scenario_manager.py  |  38 +++-
 .../test_scenario_manager_with_sql_repo.py    |   2 +-
 tests/core/sequence/test_sequence.py          |  17 +-
 tests/core/sequence/test_sequence_manager.py  |  29 ++-
 tests/core/task/test_task.py                  |   3 +-
 tests/core/task/test_task_manager.py          |  16 +-
 tests/core/test_core_cli.py                   |   2 +-
 tests/core/test_core_cli_with_sql_repo.py     |   2 +-
 tests/core/test_taipy.py                      |   6 +-
 .../version/test_version_cli_with_sql_repo.py |   6 +-
 tests/gui/builder/control/test_chart.py       |  12 +-
 tests/gui/control/test_chart.py               |  12 +-
 .../page_scopes/assets2_class_scopes/page1.py |   4 +-
 tests/gui/utils/test_map_dict.py              |  23 +--
 tests/gui_core/test_context_is_deletable.py   |  12 +-
 tests/gui_core/test_context_is_editable.py    |  12 +-
 tests/gui_core/test_context_is_promotable.py  |   4 +-
 tests/gui_core/test_context_is_readable.py    |  22 +--
 tests/gui_core/test_context_is_submitable.py  |   4 +-
 tests/rest/conftest.py                        |  23 +--
 tools/frontend/bundle_build.py                |  10 +-
 tools/gui/generate_pyi.py                     |   4 +-
 tools/packages/taipy-gui/setup.py             |   5 +-
 tools/packages/taipy/setup.py                 |   5 +-
 153 files changed, 1195 insertions(+), 1123 deletions(-)

diff --git a/doc/gui/examples/charts/error-bars-asymmetric.py b/doc/gui/examples/charts/error-bars-asymmetric.py
index 1ac8c35fab..316d1638ed 100644
--- a/doc/gui/examples/charts/error-bars-asymmetric.py
+++ b/doc/gui/examples/charts/error-bars-asymmetric.py
@@ -22,22 +22,14 @@
 # y values: [0..n_samples-1]
 y = range(0, n_samples)
 
-data = {
-    # The x series is made of random numbers between 1 and 10
-    "x": [random.uniform(1, 10) for i in y],
-    "y": y,
-}
+data = {"x": [random.uniform(1, 10) for _ in y], "y": y}
 
 options = {
     "error_x": {
         "type": "data",
-        # Allows for a 'plus' and a 'minus' error data
         "symmetric": False,
-        # The 'plus' error data is a series of random numbers
-        "array": [random.uniform(0, 5) for i in y],
-        # The 'minus' error data is a series of random numbers
-        "arrayminus": [random.uniform(0, 2) for i in y],
-        # Color of the error bar
+        "array": [random.uniform(0, 5) for _ in y],
+        "arrayminus": [random.uniform(0, 2) for _ in y],
         "color": "red",
     }
 }
diff --git a/doc/gui/examples/charts/histogram-cumulative.py b/doc/gui/examples/charts/histogram-cumulative.py
index 78bf096ef6..bd7703b8a2 100644
--- a/doc/gui/examples/charts/histogram-cumulative.py
+++ b/doc/gui/examples/charts/histogram-cumulative.py
@@ -18,7 +18,7 @@
 from taipy.gui import Gui
 
 # Random data set
-data = [random.random() for i in range(500)]
+data = [random.random() for _ in range(500)]
 
 options = {
     # Enable the cumulative histogram
diff --git a/doc/gui/examples/charts/histogram-horizontal.py b/doc/gui/examples/charts/histogram-horizontal.py
index 222cdc64dd..6de42b67ea 100644
--- a/doc/gui/examples/charts/histogram-horizontal.py
+++ b/doc/gui/examples/charts/histogram-horizontal.py
@@ -18,7 +18,7 @@
 from taipy.gui import Gui
 
 # Random data set
-data = {"Count": [random.random() for i in range(100)]}
+data = {"Count": [random.random() for _ in range(100)]}
 
 page = """
 # Histograms - Horizontal
diff --git a/doc/gui/examples/charts/histogram-nbins.py b/doc/gui/examples/charts/histogram-nbins.py
index 4dd91050ed..365e00f02c 100644
--- a/doc/gui/examples/charts/histogram-nbins.py
+++ b/doc/gui/examples/charts/histogram-nbins.py
@@ -18,7 +18,7 @@
 from taipy.gui import Gui
 
 # Random set of 100 samples
-samples = {"x": [random.gauss() for i in range(100)]}
+samples = {"x": [random.gauss() for _ in range(100)]}
 
 # Use the same data for both traces
 data = [samples, samples]
diff --git a/doc/gui/examples/charts/histogram-normalized.py b/doc/gui/examples/charts/histogram-normalized.py
index d9fae3bca9..188abcd4be 100644
--- a/doc/gui/examples/charts/histogram-normalized.py
+++ b/doc/gui/examples/charts/histogram-normalized.py
@@ -18,7 +18,7 @@
 from taipy.gui import Gui
 
 # Random data set
-data = [random.random() for i in range(100)]
+data = [random.random() for _ in range(100)]
 
 # Normalize to show bin probabilities
 options = {"histnorm": "probability"}
diff --git a/doc/gui/examples/charts/histogram-overlay.py b/doc/gui/examples/charts/histogram-overlay.py
index 1752557e39..e33ca434d7 100644
--- a/doc/gui/examples/charts/histogram-overlay.py
+++ b/doc/gui/examples/charts/histogram-overlay.py
@@ -18,7 +18,10 @@
 from taipy.gui import Gui
 
 # Data set made of two series of random numbers
-data = [{"x": [random.random() + 1 for i in range(100)]}, {"x": [random.random() + 1.1 for i in range(100)]}]
+data = [
+    {"x": [random.random() + 1 for _ in range(100)]},
+    {"x": [random.random() + 1.1 for _ in range(100)]},
+]
 
 options = [
     # First data set displayed as semi-transparent, green bars
diff --git a/doc/gui/examples/charts/histogram-simple.py b/doc/gui/examples/charts/histogram-simple.py
index f2da24265f..0f04eb7f72 100644
--- a/doc/gui/examples/charts/histogram-simple.py
+++ b/doc/gui/examples/charts/histogram-simple.py
@@ -18,7 +18,7 @@
 from taipy import Gui
 
 # Random data set
-data = [random.gauss(0, 5) for i in range(1000)]
+data = [random.gauss(0, 5) for _ in range(1000)]
 
 page = """
 # Histogram - Simple
diff --git a/doc/gui/examples/charts/histogram-stacked.py b/doc/gui/examples/charts/histogram-stacked.py
index 9244fc96d6..9d05c6c740 100644
--- a/doc/gui/examples/charts/histogram-stacked.py
+++ b/doc/gui/examples/charts/histogram-stacked.py
@@ -18,7 +18,10 @@
 from taipy.gui import Gui
 
 # Data set made of two series of random numbers
-data = {"A": [random.random() for i in range(200)], "B": [random.random() for i in range(200)]}
+data = {
+    "A": [random.random() for _ in range(200)],
+    "B": [random.random() for _ in range(200)],
+}
 
 # Names of the two traces
 names = ["A samples", "B samples"]
diff --git a/doc/gui/examples/charts/treemap-simple.py b/doc/gui/examples/charts/treemap-simple.py
index 1279844aeb..02438cb010 100644
--- a/doc/gui/examples/charts/treemap-simple.py
+++ b/doc/gui/examples/charts/treemap-simple.py
@@ -18,10 +18,10 @@
 # Data set: the first 10 elements of the Fibonacci sequence
 n_numbers = 10
 fibonacci = [0, 1]
-for i in range(2, n_numbers):
-    fibonacci.append(fibonacci[i - 1] + fibonacci[i - 2])
-
-data = {"index": [i for i in range(1, n_numbers + 1)], "fibonacci": fibonacci}
+fibonacci.extend(
+    fibonacci[i - 1] + fibonacci[i - 2] for i in range(2, n_numbers)
+)
+data = {"index": list(range(1, n_numbers + 1)), "fibonacci": fibonacci}
 
 page = """
 # TreeMap - Simple
diff --git a/taipy/config/__init__.py b/taipy/config/__init__.py
index 2812333ad7..0b34e13b28 100644
--- a/taipy/config/__init__.py
+++ b/taipy/config/__init__.py
@@ -43,8 +43,11 @@ def func_with_doc(section, attribute_name, default, configuration_methods, add_t
         for exposed_configuration_method, configuration_method in configuration_methods:
             annotation = "    @staticmethod\n"
-            sign = "    def " + exposed_configuration_method + str(signature(configuration_method)) + ":\n"
-            doc = '        """' + configuration_method.__doc__ + '"""\n'
+            sign = (
+                f"    def {exposed_configuration_method}{str(signature(configuration_method))}"
+                + ":\n"
+            )
+            doc = f'        """{configuration_method.__doc__}' + '"""\n'
             content = "        pass\n\n"
             f.write(annotation + sign + doc + content)
     return func(section, attribute_name, default, configuration_methods, add_to_unconflicted_sections)
diff --git a/taipy/config/_config.py b/taipy/config/_config.py
index d11d0a88cf..4b77e27a14 100644
--- a/taipy/config/_config.py
+++ b/taipy/config/_config.py
@@ -68,11 +68,9 @@ def __update_sections(self, entity_config, other_entity_configs):
             entity_config[self.DEFAULT_KEY] = other_entity_configs[self.DEFAULT_KEY]
         for cfg_id, sub_config in other_entity_configs.items():
             if cfg_id != self.DEFAULT_KEY:
-                if cfg_id in entity_config:
-                    entity_config[cfg_id]._update(sub_config._to_dict(), entity_config.get(self.DEFAULT_KEY))
-                else:
+                if cfg_id not in entity_config:
                     entity_config[cfg_id] = copy(sub_config)
-                    entity_config[cfg_id]._update(sub_config._to_dict(), entity_config.get(self.DEFAULT_KEY))
+                entity_config[cfg_id]._update(sub_config._to_dict(), entity_config.get(self.DEFAULT_KEY))
                 self.__point_nested_section_to_self(sub_config)
 
     def __point_nested_section_to_self(self, section):
diff --git a/taipy/config/_serializer/_base_serializer.py b/taipy/config/_serializer/_base_serializer.py
index de231f3769..ff6a129115 100644
--- a/taipy/config/_serializer/_base_serializer.py
+++ b/taipy/config/_serializer/_base_serializer.py
@@ -56,25 +56,25 @@ def _stringify(cls, as_dict):
         if as_dict is None:
             return None
         if isinstance(as_dict, Section):
-            return as_dict.id + ":SECTION"
+            return f"{as_dict.id}:SECTION"
         if isinstance(as_dict, Scope):
-            return as_dict.name + ":SCOPE"
+            return f"{as_dict.name}:SCOPE"
         if isinstance(as_dict, Frequency):
-            return as_dict.name + ":FREQUENCY"
+            return f"{as_dict.name}:FREQUENCY"
         if isinstance(as_dict, bool):
-            return str(as_dict) + ":bool"
+            return f"{str(as_dict)}:bool"
         if isinstance(as_dict, int):
-            return str(as_dict) + ":int"
+            return f"{str(as_dict)}:int"
         if isinstance(as_dict, float):
-            return str(as_dict) + ":float"
+            return f"{str(as_dict)}:float"
         if isinstance(as_dict, datetime):
-            return as_dict.isoformat() + ":datetime"
+            return f"{as_dict.isoformat()}:datetime"
         if isinstance(as_dict, timedelta):
-            return cls._timedelta_to_str(as_dict) + ":timedelta"
+            return f"{cls._timedelta_to_str(as_dict)}:timedelta"
         if inspect.isfunction(as_dict) or isinstance(as_dict, types.BuiltinFunctionType):
-            return as_dict.__module__ + "." + as_dict.__name__ + ":function"
+            return f"{as_dict.__module__}.{as_dict.__name__}:function"
         if inspect.isclass(as_dict):
-            return as_dict.__module__ + "." + as_dict.__qualname__ + ":class"
+            return f"{as_dict.__module__}.{as_dict.__qualname__}:class"
         if isinstance(as_dict, dict):
             return {str(key): cls._stringify(val) for key, val in as_dict.items()}
         if isinstance(as_dict, list):
@@ -115,8 +115,7 @@ def _pythonify(cls, val):
             r"^(.+):(\bbool\b|\bstr\b|\bint\b|\bfloat\b|\bdatetime\b||\btimedelta\b|"
             r"\bfunction\b|\bclass\b|\bSCOPE\b|\bFREQUENCY\b|\bSECTION\b)?$"
         )
-        match = re.fullmatch(TYPE_PATTERN, str(val))
-        if match:
+        if match := re.fullmatch(TYPE_PATTERN, str(val)):
             actual_val = match.group(1)
             dynamic_type = match.group(2)
             if dynamic_type == "SECTION":
diff --git a/taipy/config/checker/_checkers/_config_checker.py b/taipy/config/checker/_checkers/_config_checker.py
index 9887424cc8..4d52a29c89 100644
--- a/taipy/config/checker/_checkers/_config_checker.py
+++ b/taipy/config/checker/_checkers/_config_checker.py
@@ -51,17 +51,16 @@ def _check_children(
                 config_value,
                 f"{config_key} field of {parent_config_class.__name__} `{config_id}` is empty.",
             )
-        else:
-            if not (
-                (isinstance(config_value, List) or isinstance(config_value, Set))
-                and all(map(lambda x: isinstance(x, child_config_class), config_value))
-            ):
-                self._error(
-                    config_key,
-                    config_value,
-                    f"{config_key} field of {parent_config_class.__name__} `{config_id}` must be populated with a list "
-                    f"of {child_config_class.__name__} objects.",
-                )
+        elif not (
+            (isinstance(config_value, (List, Set)))
+            and all(map(lambda x: isinstance(x, child_config_class), config_value))
+        ):
+            self._error(
+                config_key,
+                config_value,
+                f"{config_key} field of {parent_config_class.__name__} `{config_id}` must be populated with a list "
+                f"of {child_config_class.__name__} objects.",
+            )
 
     def _check_existing_config_id(self, config):
         if not config.id:
diff --git a/taipy/config/common/_config_blocker.py b/taipy/config/common/_config_blocker.py
index bf9ae4b9d0..ac9cb93d27 100644
--- a/taipy/config/common/_config_blocker.py
+++ b/taipy/config/common/_config_blocker.py
@@ -40,7 +40,7 @@ def _check_if_is_blocking(*args, **kwargs):
                     " modifying the Configuration. For more information, please refer to:"
                     " https://docs.taipy.io/en/latest/manuals/running_services/#running-core."
                 )
-                cls.__logger.error("ConfigurationUpdateBlocked: " + error_message)
+                cls.__logger.error(f"ConfigurationUpdateBlocked: {error_message}")
                 raise ConfigurationUpdateBlocked(error_message)
             return f(*args, **kwargs)
diff --git a/taipy/config/common/_template_handler.py b/taipy/config/common/_template_handler.py
index 12273f6880..c404053342 100644
--- a/taipy/config/common/_template_handler.py
+++ b/taipy/config/common/_template_handler.py
@@ -43,8 +43,7 @@ def _replace_templates(cls, template, type=str, required=True, default=None):
     def _replace_template(cls, template, type, required, default):
         if "ENV" not in str(template):
             return template
-        match = re.fullmatch(cls._PATTERN, str(template))
-        if match:
+        if match := re.fullmatch(cls._PATTERN, str(template)):
             var = match.group(1)
             dynamic_type = match.group(3)
             val = os.environ.get(var)
@@ -77,7 +76,7 @@ def _to_bool(val: str) -> bool:
         possible_values = ["true", "false"]
         if str.lower(val) not in possible_values:
             raise InconsistentEnvVariableError("{val} is not a Boolean.")
-        return str.lower(val) == "true" or not (str.lower(val) == "false")
+        return str.lower(val) == "true" or str.lower(val) != "false"
 
     @staticmethod
     def _to_int(val: str) -> int:
diff --git a/taipy/config/config.py b/taipy/config/config.py
index 4b86aaa339..e6ede7c4f0 100644
--- a/taipy/config/config.py
+++ b/taipy/config/config.py
@@ -178,11 +178,10 @@ def _register_default(cls, default_section: Section):
                 cls._default_config._unique_sections[default_section.name]._update(default_section._to_dict())
             else:
                 cls._default_config._unique_sections[default_section.name] = default_section
+        elif def_sections := cls._default_config._sections.get(default_section.name, None):
+            def_sections[default_section.id] = default_section
         else:
-            if def_sections := cls._default_config._sections.get(default_section.name, None):
-                def_sections[default_section.id] = default_section
-            else:
-                cls._default_config._sections[default_section.name] = {default_section.id: default_section}
+            cls._default_config._sections[default_section.name] = {default_section.id: default_section}
         cls._serializer._section_class[default_section.name] = default_section.__class__  # type: ignore
         cls.__json_serializer._section_class[default_section.name] = default_section.__class__  # type: ignore
         cls._compile_configs()
@@ -195,14 +194,13 @@ def _register(cls, section):
                 cls._python_config._unique_sections[section.name]._update(section._to_dict())
             else:
                 cls._python_config._unique_sections[section.name] = section
-        else:
-            if sections := cls._python_config._sections.get(section.name, None):
-                if sections.get(section.id, None):
-                    sections[section.id]._update(section._to_dict())
-                else:
-                    sections[section.id] = section
+        elif sections := cls._python_config._sections.get(section.name, None):
+            if sections.get(section.id, None):
+                sections[section.id]._update(section._to_dict())
             else:
-                cls._python_config._sections[section.name] = {section.id: section}
+                sections[section.id] = section
+        else:
+            cls._python_config._sections[section.name] = {section.id: section}
         cls._serializer._section_class[section.name] = section.__class__
         cls.__json_serializer._section_class[section.name] = section.__class__
         cls._compile_configs()
diff --git a/taipy/core/_backup/_backup.py b/taipy/core/_backup/_backup.py
index 825dd9d0cb..6bd4d34897 100644
--- a/taipy/core/_backup/_backup.py
+++ b/taipy/core/_backup/_backup.py
@@ -32,27 +32,32 @@ def _append_to_backup_file(new_file_path: str):
 
 
 def _remove_from_backup_file(to_remove_file_path: str):
-    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME, None):
-        storage_folder = os.path.abspath(Config.core.storage_folder) + os.sep
-        if not os.path.abspath(to_remove_file_path).startswith(storage_folder):
-            try:
-                with open(preserve_file_path, "r+") as f:
-                    old_backup = f.read()
-                    to_remove_file_path = to_remove_file_path + "\n"
-
-                    # To avoid removing the file path of different data nodes that are pointing
-                    # to the same file. We will only replace the file path only once.
-                    if old_backup.startswith(to_remove_file_path):
-                        new_backup = old_backup.replace(to_remove_file_path, "", 1)
-                    else:
-                        new_backup = old_backup.replace("\n" + to_remove_file_path, "\n", 1)
-
-                    if new_backup is not old_backup:
-                        f.seek(0)
-                        f.write(new_backup)
-                        f.truncate()
-            except Exception:
-                pass
+    if not (
+        preserve_file_path := os.getenv(
+            __BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME, None
+        )
+    ):
+        return
+    storage_folder = os.path.abspath(Config.core.storage_folder) + os.sep
+    if not os.path.abspath(to_remove_file_path).startswith(storage_folder):
+        try:
+            with open(preserve_file_path, "r+") as f:
+                old_backup = f.read()
+                to_remove_file_path += "\n"
+
+                # To avoid removing the file path of different data nodes that are pointing
+                # to the same file. We will only replace the file path only once.
+                if old_backup.startswith(to_remove_file_path):
+                    new_backup = old_backup.replace(to_remove_file_path, "", 1)
+                else:
+                    new_backup = old_backup.replace("\n" + to_remove_file_path, "\n", 1)
+
+                if new_backup is not old_backup:
+                    f.seek(0)
+                    f.write(new_backup)
+                    f.truncate()
+        except Exception:
+            pass
 
 
 def _replace_in_backup_file(old_file_path: str, new_file_path: str):
diff --git a/taipy/core/_core_cli.py b/taipy/core/_core_cli.py
index a828211934..10fa2f4409 100644
--- a/taipy/core/_core_cli.py
+++ b/taipy/core/_core_cli.py
@@ -113,6 +113,6 @@ def parse_arguments(cls):
     @classmethod
     def __add_taipy_prefix(cls, key: str):
         if key.startswith("--no-"):
-            return key[:5] + "taipy-" + key[5:]
+            return f"{key[:5]}taipy-{key[5:]}"
 
-        return key[:2] + "taipy-" + key[2:]
+        return f"{key[:2]}taipy-{key[2:]}"
diff --git a/taipy/core/_entity/_dag.py b/taipy/core/_entity/_dag.py
index cf27c2fa24..92ead2d85a 100644
--- a/taipy/core/_entity/_dag.py
+++ b/taipy/core/_entity/_dag.py
@@ -31,7 +31,7 @@ def __init__(self, src: _Node, dest: _Node):
 
 class _DAG:
     def __init__(self, dag: nx.DiGraph):
-        self._sorted_nodes = list(nodes for nodes in nx.topological_generations(dag))
+        self._sorted_nodes = list(nx.topological_generations(dag))
         self._length, self._width = self.__compute_size()
         self._grid_length, self._grid_width = self.__compute_grid_size()
         self._nodes = self.__compute_nodes()
@@ -54,7 +54,7 @@ def edges(self) -> List[_Edge]:
         return self._edges
 
     def __compute_size(self) -> Tuple[int, int]:
-        return len(self._sorted_nodes), max([len(i) for i in self._sorted_nodes])
+        return len(self._sorted_nodes), max(len(i) for i in self._sorted_nodes)
 
     def __compute_grid_size(self) -> Tuple[int, int]:
         if self._width == 1:
@@ -65,8 +65,7 @@ def __compute_grid_size(self) -> Tuple[int, int]:
 
     def __compute_nodes(self) -> Dict[str, _Node]:
         nodes = {}
-        x = 0
-        for same_lvl_nodes in self._sorted_nodes:
+        for x, same_lvl_nodes in enumerate(self._sorted_nodes):
             lcl_wdt = len(same_lvl_nodes)
             is_max = lcl_wdt != self.width
             if self.width != 1:
@@ -77,14 +76,13 @@ def __compute_nodes(self) -> Dict[str, _Node]:
             for node in same_lvl_nodes:
                 y += y_incr
                 nodes[node.id] = _Node(node, x, y)
-            x += 1
         return nodes
 
     def __compute_edges(self, dag) -> List[_Edge]:
-        edges = []
-        for edge in dag.edges():
-            edges.append(_Edge(self.nodes[edge[0].id], self.nodes[edge[1].id]))
-        return edges
+        return [
+            _Edge(self.nodes[edge[0].id], self.nodes[edge[1].id])
+            for edge in dag.edges()
+        ]
 
     @staticmethod
     def __lcm(*integers) -> int:
diff --git a/taipy/core/_entity/_entity.py b/taipy/core/_entity/_entity.py
index d1d0b3f43b..ccaec4ab20 100644
--- a/taipy/core/_entity/_entity.py
+++ b/taipy/core/_entity/_entity.py
@@ -22,7 +22,7 @@ class _Entity:
 
     def __enter__(self):
         self._is_in_context = True
-        self._in_context_attributes_changed_collector = list()
+        self._in_context_attributes_changed_collector = []
         return self
 
     def __exit__(self, exc_type, exc_value, exc_traceback):
diff --git a/taipy/core/_entity/_labeled.py b/taipy/core/_entity/_labeled.py
index ff950f6a3b..c8fb4b05ca 100644
--- a/taipy/core/_entity/_labeled.py
+++ b/taipy/core/_entity/_labeled.py
@@ -57,9 +57,7 @@ def _get_explicit_label(self) -> Optional[str]:
         return None
 
     def _get_owner_id(self) -> Optional[str]:
-        if hasattr(self, "owner_id"):
-            return getattr(self, "owner_id")
-        return None
+        return getattr(self, "owner_id") if hasattr(self, "owner_id") else None
 
     def _get_name(self) -> Optional[str]:
         if hasattr(self, "name"):
@@ -69,9 +67,7 @@ def _get_name(self) -> Optional[str]:
         return None
 
     def _get_config_id(self) -> Optional[str]:
-        if hasattr(self, "config_id"):
-            return getattr(self, "config_id")
-        return None
+        return getattr(self, "config_id") if hasattr(self, "config_id") else None
 
     def _generate_entity_label(self) -> str:
         if name := self._get_name():
diff --git a/taipy/core/_entity/_migrate/_utils.py b/taipy/core/_entity/_migrate/_utils.py
index b53165a9aa..447e4cb752 100644
--- a/taipy/core/_entity/_migrate/_utils.py
+++ b/taipy/core/_entity/_migrate/_utils.py
@@ -48,8 +48,8 @@ def __search_parent_ids(entity_id: str, data: Dict) -> List:
             if entity_id in entity_data["input_ids"] or entity_id in entity_data["output_ids"]:
                 parents.append(_id)
 
-        if entity_type == "TASK" and "SCENARIO" in _id:
-            if entity_id in entity_data["tasks"]:
+        if entity_id in entity_data["tasks"]:
+            if entity_type == "TASK" and "SCENARIO" in _id:
                 parents.append(_id)
     parents.sort()
     return parents
@@ -60,8 +60,8 @@ def __search_parent_config(entity_id: str, config: Dict, entity_type: str) -> Li
     possible_parents = "TASK" if entity_type == "DATA_NODE" else "SCENARIO"
     data = config[possible_parents]
 
+    section_id = f"{entity_id}:SECTION"
     for _id, entity_data in data.items():
-        section_id = f"{entity_id}:SECTION"
         if entity_type == "DATANODE" and possible_parents == "TASK":
             if section_id in entity_data["input_ids"] or section_id in entity_data["output_ids"]:
                 parents.append(section_id)
@@ -281,7 +281,7 @@ def __migrate_entities(entity_type: str, data: Dict) -> Dict:
     _entities = {k: data[k] for k in data if entity_type in k}
 
     for k, v in _entities.items():
-        if entity_type in ["JOB", "VERSION"]:
+        if entity_type in {"JOB", "VERSION"}:
             v["data"] = migration_fct(v["data"])  # type: ignore
         else:
             v["data"] = migration_fct(v["data"], data)  # type: ignore
diff --git a/taipy/core/_entity/_migrate_cli.py b/taipy/core/_entity/_migrate_cli.py
index 994f870cff..fb60d7862b 100644
--- a/taipy/core/_entity/_migrate_cli.py
+++ b/taipy/core/_entity/_migrate_cli.py
@@ -77,7 +77,7 @@ def parse_arguments(cls):
             if args.remove_backup:
                 cls.__handle_remove_backup(repository_type, repository_args)
 
-            do_backup = False if args.skip_backup else True
+            do_backup = not args.skip_backup
             cls.__migrate_entities(repository_type, repository_args, do_backup)
 
         sys.exit(0)
@@ -124,14 +124,14 @@ def __migrate_entities(cls, repository_type: str, repository_args: List, do_back
             if not _migrate_fs_entities(path, do_backup):
                 sys.exit(1)
 
-        elif repository_type == "sql":
-            if not _migrate_sql_entities(repository_args[0], do_backup):
-                sys.exit(1)
-
         elif repository_type == "mongo":
             mongo_args = repository_args[1:5] if repository_args[0] else []
             _migrate_mongo_entities(*mongo_args, backup=do_backup)  # type: ignore
 
+        elif repository_type == "sql":
+            if not _migrate_sql_entities(repository_args[0], do_backup):
+                sys.exit(1)
+
         else:
             cls.__logger.error(f"Unknown repository type {repository_type}")
             sys.exit(1)
diff --git a/taipy/core/_entity/_reload.py b/taipy/core/_entity/_reload.py
index 542bf4546a..bbec207d4e 100644
--- a/taipy/core/_entity/_reload.py
+++ b/taipy/core/_entity/_reload.py
@@ -21,10 +21,10 @@ class _Reloader:
 
     _no_reload_context = False
 
-    def __new__(class_, *args, **kwargs):
-        if not isinstance(class_._instance, class_):
-            class_._instance = object.__new__(class_, *args, **kwargs)
-        return class_._instance
+    def __new__(cls, *args, **kwargs):
+        if not isinstance(cls._instance, cls):
+            cls._instance = object.__new__(cls, *args, **kwargs)
+        return cls._instance
 
     def _reload(self, manager: str, obj):
         if self._no_reload_context:
@@ -65,10 +65,7 @@ def __set_entity(fct):
     def _do_set_entity(self, *args, **kwargs):
         fct(self, *args, **kwargs)
         entity_manager = _get_manager(manager)
-        if len(args) == 1:
-            value = args[0]
-        else:
-            value = args
+        value = args[0] if len(args) == 1 else args
         event = _make_event(
             self,
             EventOperation.UPDATE,
diff --git a/taipy/core/_entity/submittable.py b/taipy/core/_entity/submittable.py
index 64841f1664..95c62d8701 100644
--- a/taipy/core/_entity/submittable.py
+++ b/taipy/core/_entity/submittable.py
@@ -33,7 +33,7 @@ class Submittable:
     """
 
     def __init__(self, subscribers: Optional[List[_Subscriber]] = None):
-        self._subscribers = _ListAttributes(self, subscribers or list())
+        self._subscribers = _ListAttributes(self, subscribers or [])
 
     @abc.abstractmethod
     def submit(
@@ -129,7 +129,11 @@ def _get_sorted_tasks(self) -> List[List[Task]]:
         dag = self._build_dag()
         remove = [node for node, degree in dict(dag.in_degree).items() if degree == 0 and isinstance(node, DataNode)]
         dag.remove_nodes_from(remove)
-        return list(nodes for nodes in nx.topological_generations(dag) if (Task in (type(node) for node in nodes)))
+        return [
+            nodes
+            for nodes in nx.topological_generations(dag)
+            if (Task in (type(node) for node in nodes))
+        ]
 
     def _add_subscriber(self, callback: Callable, params: Optional[List[Any]] = None):
         params = [] if params is None else params
@@ -138,8 +142,7 @@ def _add_subscriber(self, callback: Callable, params: Optional[List[Any]] = None
     def _remove_subscriber(self, callback: Callable, params: Optional[List[Any]] = None):
         if params is not None:
             self._subscribers.remove(_Subscriber(callback, params))
-        else:
-            elem = [x for x in self._subscribers if x.callback == callback]
-            if not elem:
-                raise ValueError
+        elif elem := [x for x in self._subscribers if x.callback == callback]:
             self._subscribers.remove(elem[0])
+        else:
+            raise ValueError
diff --git a/taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py b/taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py
index 8972dc35f7..8b5aa5236c 100644
--- a/taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py
+++ b/taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py
@@ -38,8 +38,7 @@ def __call__(self, **kwargs):
     def execute(self, **kwargs):
         """Execute the wrapped function. If `config_as_string` is given, then it will be reapplied to the config."""
         try:
-            config_as_string = kwargs.pop("config_as_string", None)
-            if config_as_string:
+            if config_as_string := kwargs.pop("config_as_string", None):
                 logger.info("Updating with given config.")
                 Config._applied_config._update(_TomlSerializer()._deserialize(config_as_string))
                 Config.block_update()
diff --git a/taipy/core/_orchestrator/_orchestrator.py b/taipy/core/_orchestrator/_orchestrator.py
index aa810118ea..5369a7963c 100644
--- a/taipy/core/_orchestrator/_orchestrator.py
+++ b/taipy/core/_orchestrator/_orchestrator.py
@@ -77,26 +77,26 @@ def submit(
         tasks = submittable._get_sorted_tasks()
         with cls.lock:
             for ts in tasks:
-                for task in ts:
-                    jobs.append(
-                        cls._lock_dn_output_and_create_job(
-                            task,
-                            submission.id,
-                            submission.entity_id,
-                            callbacks=itertools.chain([submission._update_submission_status], callbacks or []),
-                            force=force,  # type: ignore
-                        )
+                jobs.extend(
+                    cls._lock_dn_output_and_create_job(
+                        task,
+                        submission.id,
+                        submission.entity_id,
+                        callbacks=itertools.chain(
+                            [submission._update_submission_status], callbacks or []
+                        ),
+                        force=force,  # type: ignore
                     )
-
+                    for task in ts
+                )
         submission.jobs = jobs  # type: ignore
 
         cls._orchestrate_job_to_run_or_block(jobs)
 
         if Config.job_config.is_development:
             cls._check_and_execute_jobs_if_development_mode()
-        else:
-            if wait:
-                cls.__wait_until_job_finished(jobs, timeout=timeout)
+        elif wait:
+            cls.__wait_until_job_finished(jobs, timeout=timeout)
 
         return jobs
 
@@ -141,9 +141,8 @@ def submit_task(
 
         if Config.job_config.is_development:
             cls._check_and_execute_jobs_if_development_mode()
-        else:
-            if wait:
-                cls.__wait_until_job_finished(job, timeout=timeout)
+        elif wait:
+            cls.__wait_until_job_finished(job, timeout=timeout)
 
         return job
 
@@ -158,12 +157,14 @@ def _lock_dn_output_and_create_job(
     ) -> Job:
         for dn in task.output.values():
             dn.lock_edit()
-        job = _JobManagerFactory._build_manager()._create(
-            task, itertools.chain([cls._on_status_change], callbacks or []), submit_id, submit_entity_id, force=force
+        return _JobManagerFactory._build_manager()._create(
+            task,
+            itertools.chain([cls._on_status_change], callbacks or []),
+            submit_id,
+            submit_entity_id,
+            force=force,
         )
 
-        return job
-
     @classmethod
     def _orchestrate_job_to_run_or_block(cls, jobs: List[Job]):
         blocked_jobs = []
@@ -184,9 +185,7 @@ def _orchestrate_job_to_run_or_block(cls, jobs: List[Job]):
     @classmethod
     def __wait_until_job_finished(cls, jobs: Union[List[Job], Job], timeout: Optional[Union[float, int]] = None):
         def __check_if_timeout(start, timeout):
-            if timeout:
-                return (datetime.now() - start).seconds < timeout
-            return True
+            return (datetime.now() - start).seconds < timeout if timeout else True
 
         start = datetime.now()
         jobs = jobs if isinstance(jobs, Iterable) else [jobs]
@@ -195,7 +194,7 @@ def __check_if_timeout(start, timeout):
         while __check_if_timeout(start, timeout) and index < len(jobs):
             try:
                 if jobs[index]._is_finished():
-                    index = index + 1
+                    index += 1
                 else:
                     sleep(0.5)  # Limit CPU usage
 
@@ -255,7 +254,7 @@ def cancel_job(cls, job: Job):
             cls.__logger.info(f"{job.id} has already failed and cannot be canceled.")
         else:
             with cls.lock:
-                to_cancel_or_abandon_jobs = set([job])
+                to_cancel_or_abandon_jobs = {job}
                 to_cancel_or_abandon_jobs.update(cls.__find_subsequent_jobs(job.submit_id, set(job.task.output.keys())))
                 cls.__remove_blocked_jobs(to_cancel_or_abandon_jobs)
                 cls.__remove_jobs_to_run(to_cancel_or_abandon_jobs)
@@ -271,7 +270,7 @@ def __find_subsequent_jobs(cls, submit_id, output_dn_config_ids: Set) -> Set[Job
             if job.submit_id == submit_id and len(output_dn_config_ids.intersection(job_input_dn_config_ids)) > 0:
                 next_output_dn_config_ids.update(job.task.output.keys())
                 subsequent_jobs.update([job])
-        if len(next_output_dn_config_ids) > 0:
+        if next_output_dn_config_ids:
             subsequent_jobs.update(
                 cls.__find_subsequent_jobs(submit_id, output_dn_config_ids=next_output_dn_config_ids)
             )
@@ -316,11 +315,10 @@ def _cancel_jobs(cls, job_id_to_cancel: JobId, jobs: Set[Job]):
                 cls.__logger.info(f"{job.id} has already been completed and cannot be canceled.")
             elif job.is_skipped():
                 cls.__logger.info(f"{job.id} has already been skipped and cannot be canceled.")
+            elif job_id_to_cancel == job.id:
+                job.canceled()
             else:
-                if job_id_to_cancel == job.id:
-                    job.canceled()
-                else:
-                    job.abandoned()
+                job.abandoned()
 
     @staticmethod
     def _check_and_execute_jobs_if_development_mode():
diff --git a/taipy/core/_repository/_base_taipy_model.py b/taipy/core/_repository/_base_taipy_model.py
index 68fff281e3..c7254fbadc 100644
--- a/taipy/core/_repository/_base_taipy_model.py
+++ b/taipy/core/_repository/_base_taipy_model.py
@@ -24,8 +24,7 @@ class _BaseModel:
     __table__: Table
 
     def __iter__(self):
-        for attr, value in self.__dict__.items():
-            yield attr, value
+        yield from self.__dict__.items()
 
     def to_dict(self) -> Dict[str, Any]:
         model_dict = {**dataclasses.asdict(self)}
diff --git a/taipy/core/_repository/_encoder.py b/taipy/core/_repository/_encoder.py
index ab48870bfe..513ec73d97 100644
--- a/taipy/core/_repository/_encoder.py
+++ b/taipy/core/_repository/_encoder.py
@@ -27,14 +27,13 @@ def _timedelta_to_str(self, obj: timedelta) -> str:
 
     def default(self, o: Any):
         if isinstance(o, Enum):
-            result = o.value
+            return o.value
         elif isinstance(o, datetime):
-            result = {"__type__": "Datetime", "__value__": o.isoformat()}
+            return {"__type__": "Datetime", "__value__": o.isoformat()}
         elif isinstance(o, timedelta):
-            result = {"__type__": "Timedelta", "__value__": self._timedelta_to_str(o)}
+            return {"__type__": "Timedelta", "__value__": self._timedelta_to_str(o)}
         else:
-            result = json.JSONEncoder.default(self, o)
-        return result
+            return json.JSONEncoder.default(self, o)
 
 
 def dumps(d):
diff --git a/taipy/core/_repository/_filesystem_repository.py b/taipy/core/_repository/_filesystem_repository.py
index d352e9316b..1cbe4ae52c 100644
--- a/taipy/core/_repository/_filesystem_repository.py
+++ b/taipy/core/_repository/_filesystem_repository.py
@@ -159,7 +159,7 @@ def _get_by_configs_and_owner_ids(self, configs_and_owner_ids, filters: Optional
                         res[key] = entity
                         configs_and_owner_ids.remove(key)
 
-            if len(configs_and_owner_ids) == 0:
+            if not configs_and_owner_ids:
                 return res
         except FileNotFoundError:
             # Folder with data was not created yet.
@@ -170,11 +170,7 @@ def _get_by_configs_and_owner_ids(self, configs_and_owner_ids, filters: Optional
     def _get_by_config_and_owner_id(
         self, config_id: str, owner_id: Optional[str], filters: Optional[List[Dict]] = None
     ) -> Optional[Entity]:
-        if not filters:
-            filters = [{}]
-        else:
-            filters = copy.deepcopy(filters)
-
+        filters = [{}] if not filters else copy.deepcopy(filters)
         if owner_id is not None:
             for fil in filters:
                 fil.update({"owner_id": owner_id})
@@ -228,8 +224,7 @@ def __file_content_to_entity(self, file_content):
         if isinstance(file_content, str):
             file_content = json.loads(file_content, cls=_Decoder)
         model = self.model_type.from_dict(file_content)
-        entity = self.converter._model_to_entity(model)
-        return entity
+        return self.converter._model_to_entity(model)
 
     def __filter_by(self, filepath: pathlib.Path, filters: Optional[List[Dict]]) -> Optional[Json]:
         if not filters:
diff --git a/taipy/core/_repository/_sql_repository.py b/taipy/core/_repository/_sql_repository.py
index e0d8e79d97..3317f0133b 100644
--- a/taipy/core/_repository/_sql_repository.py
+++ b/taipy/core/_repository/_sql_repository.py
@@ -130,11 +130,14 @@ def _export(self, entity_id: str, folder_path: Union[str, pathlib.Path]):
 
         query = self.table.select().filter_by(id=entity_id)
 
-        if entry := self.db.execute(str(query.compile(dialect=sqlite.dialect())), [entity_id]).fetchone():
-            with open(export_path, "w", encoding="utf-8") as export_file:
-                export_file.write(json.dumps(entry))
-        else:
+        if not (
+            entry := self.db.execute(
+                str(query.compile(dialect=sqlite.dialect())), [entity_id]
+            ).fetchone()
+        ):
             raise ModelNotFound(self.model_type, entity_id)  # type: ignore
+        with open(export_path, "w", encoding="utf-8") as export_file:
+            export_file.write(json.dumps(entry))
 
     ###########################################
     # ##   Specific or optimized methods   ## #
@@ -165,8 +168,9 @@ def _get_by_configs_and_owner_ids(self, configs_and_owner_ids, filters: Optional
         configs_and_owner_ids = set(configs_and_owner_ids)
 
         for config, owner in configs_and_owner_ids:
-            entry = self.__get_entities_by_config_and_owner(config.id, owner, filters)
-            if entry:
+            if entry := self.__get_entities_by_config_and_owner(
+                config.id, owner, filters
+            ):
                 entity = self.converter._model_to_entity(entry)
                 key = config, owner
                 res[key] = entity
@@ -190,7 +194,7 @@ def __get_entities_by_config_and_owner(
 
         if versions:
             table_name = self.table.name
-            query = query + f" AND {table_name}.version IN ({','.join(['?']*len(versions))})"
+            query += f" AND {table_name}.version IN ({','.join(['?'] * len(versions))})"
             parameters.extend(versions)
 
         if entry := self.db.execute(query, parameters).fetchone():
diff --git a/taipy/core/_repository/db/_sql_connection.py b/taipy/core/_repository/db/_sql_connection.py
index 1127cc4e3f..9d7442cbe7 100644
--- a/taipy/core/_repository/db/_sql_connection.py
+++ b/taipy/core/_repository/db/_sql_connection.py
@@ -22,10 +22,7 @@
 
 
 def dict_factory(cursor, row):
-    d = {}
-    for idx, col in enumerate(cursor.description):
-        d[col[0]] = row[idx]
-    return d
+    return {col[0]: row[idx] for idx, col in enumerate(cursor.description)}
 
 
 class _SQLConnection:
diff --git a/taipy/core/_version/_utils.py b/taipy/core/_version/_utils.py
index 369ddd9be2..a11f548895 100644
--- a/taipy/core/_version/_utils.py
+++ b/taipy/core/_version/_utils.py
@@ -43,8 +43,11 @@ def __get_migration_fcts_to_latest(source_version: str, config_id: str) -> List[
     versions_to_migrate = production_versions[start_index:]
 
     for version in versions_to_migrate:
-        migration_fct = Config.unique_sections[MigrationConfig.name].migration_fcts.get(version, {}).get(config_id)
-        if migration_fct:
+        if (
+            migration_fct := Config.unique_sections[MigrationConfig.name]
+            .migration_fcts.get(version, {})
+            .get(config_id)
+        ):
             migration_fcts_to_latest.append(migration_fct)
 
     return migration_fcts_to_latest
diff --git a/taipy/core/_version/_version_manager.py b/taipy/core/_version/_version_manager.py
index 99cf0ee04c..32647260a3 100644
--- a/taipy/core/_version/_version_manager.py
+++ b/taipy/core/_version/_version_manager.py
@@ -54,14 +54,13 @@ def _get_or_create(cls, id: str, force: bool) -> _Version:
         if version := cls._get(id):
             comparator_result = Config._comparator._find_conflict_config(version.config, Config._applied_config, id)
             if comparator_result.get(_ComparatorResult.CONFLICTED_SECTION_KEY):
-                if force:
-                    cls.__logger.warning(
-                        f"Option --force is detected, overriding the configuration of version {id} ..."
-                    )
-                    version.config = Config._applied_config
-                else:
+                if not force:
                     raise ConflictedConfigurationError()
 
+                cls.__logger.warning(
+                    f"Option --force is detected, overriding the configuration of version {id} ..."
+                )
+                version.config = Config._applied_config
         else:
             version = _Version(id=id, config=Config._applied_config)
 
@@ -212,16 +211,16 @@ def _manage_version(cls):
             raise SystemExit(f"Undefined execution mode: {Config.core.mode}.")
 
     @classmethod
-    def __check_production_migration_config(self):
+    def __check_production_migration_config(cls):
        from ..config.checkers._migration_config_checker import _MigrationConfigChecker
 
        collector = _MigrationConfigChecker(Config._applied_config, IssueCollector())._check()
        for issue in collector._warnings:
-            self.__logger.warning(str(issue))
+            cls.__logger.warning(str(issue))
        for issue in collector._infos:
-            self.__logger.info(str(issue))
+            cls.__logger.info(str(issue))
        for issue in collector._errors:
-            self.__logger.error(str(issue))
+            cls.__logger.error(str(issue))
        if len(collector._errors) != 0:
            raise SystemExit("Configuration errors found. Please check the error log for more information.")
diff --git a/taipy/core/_version/_version_manager_factory.py b/taipy/core/_version/_version_manager_factory.py
index b417a04e0c..ea91ea62b0 100644
--- a/taipy/core/_version/_version_manager_factory.py
+++ b/taipy/core/_version/_version_manager_factory.py
@@ -24,11 +24,13 @@ class _VersionManagerFactory(_ManagerFactory):
     def _build_manager(cls) -> _VersionManager:  # type: ignore
         if cls._using_enterprise():
             version_manager = _utils._load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + "._version._version_manager", "_VersionManager"
-            )  # type: ignore
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}._version._version_manager",
+                "_VersionManager",
+            )
             build_repository = _utils._load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + "._version._version_manager_factory", "_VersionManagerFactory"
-            )._build_repository  # type: ignore
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}._version._version_manager_factory",
+                "_VersionManagerFactory",
+            )._build_repository
         else:
             version_manager = _VersionManager
             build_repository = cls._build_repository
diff --git a/taipy/core/_version/_version_mixin.py b/taipy/core/_version/_version_mixin.py
index 8f3a5c4175..e8ec8b1391 100644
--- a/taipy/core/_version/_version_mixin.py
+++ b/taipy/core/_version/_version_mixin.py
@@ -28,10 +28,11 @@ def __fetch_version_number(cls, version_number):
 
     @classmethod
     def _build_filters_with_version(cls, version_number) -> List[Dict]:
-        filters = []
-        if versions := cls.__fetch_version_number(version_number):
-            filters = [{"version": version} for version in versions]
-        return filters
+        return (
+            [{"version": version} for version in versions]
+            if (versions := cls.__fetch_version_number(version_number))
+            else []
+        )
 
     @classmethod
     def _get_latest_version(cls):
diff --git a/taipy/core/common/_mongo_connector.py b/taipy/core/common/_mongo_connector.py
index bb85daa70f..e6781cc2ed 100644
--- a/taipy/core/common/_mongo_connector.py
+++ b/taipy/core/common/_mongo_connector.py
@@ -38,7 +38,7 @@ def _connect_mongodb(
     extra_args_str = "&".join(f"{k}={str(v)}" for k, v in db_extra_args)
     if extra_args_str:
-        extra_args_str = "/?" + extra_args_str
+        extra_args_str = f"/?{extra_args_str}"
 
     driver = "mongodb"
     if db_driver:
diff --git a/taipy/core/common/_utils.py b/taipy/core/common/_utils.py
index 25ec7a4897..73affe29e9 100644
--- a/taipy/core/common/_utils.py
+++ b/taipy/core/common/_utils.py
@@ -52,10 +52,7 @@ def newfn(*args, **kwargs):
 
 @functools.lru_cache
 def _get_fct_name(f) -> Optional[str]:
-    # Mock function does not have __qualname__ attribute -> return __name__
-    # Partial or anonymous function does not have __name__ or __qualname__ attribute -> return None
-    name = getattr(f, "__qualname__", getattr(f, "__name__", None))
-    return name
+    return getattr(f, "__qualname__", getattr(f, "__name__", None))
 
 
 def _fct_to_dict(obj):
@@ -66,14 +63,14 @@ def _fct_to_dict(obj):
         callback = obj.callback
         params = obj.params
 
-    fct_name = _get_fct_name(callback)
-    if not fct_name:
+    if fct_name := _get_fct_name(callback):
+        return {
+            "fct_name": fct_name,
+            "fct_params": params,
+            "fct_module": callback.__module__,
+        }
+    else:
         return None
-    return {
-        "fct_name": fct_name,
-        "fct_params": params,
-        "fct_module": callback.__module__,
-    }
 
 
 def _fcts_to_dict(objs):
diff --git a/taipy/core/config/checkers/_config_id_checker.py b/taipy/core/config/checkers/_config_id_checker.py
index 896aaff103..9bc4ae703a 100644
--- a/taipy/core/config/checkers/_config_id_checker.py
+++ b/taipy/core/config/checkers/_config_id_checker.py
@@ -24,7 +24,7 @@ def _check(self) -> IssueCollector:
         existing_config_ids: Dict[str, List[str]] = dict()
         for entity_type, section_dictionary in self._config._sections.items():
             for config_id in section_dictionary.keys():
-                if config_id in existing_config_ids.keys():
+                if config_id in existing_config_ids:
                     existing_config_ids[config_id].append(entity_type)
                 else:
                     existing_config_ids[config_id] = [entity_type]
diff --git a/taipy/core/config/checkers/_data_node_config_checker.py b/taipy/core/config/checkers/_data_node_config_checker.py
index ec11d62eb5..862ee15edf 100644
--- a/taipy/core/config/checkers/_data_node_config_checker.py
+++ b/taipy/core/config/checkers/_data_node_config_checker.py
@@ -66,95 +66,102 @@ def _check_validity_period(self, data_node_config_id: str, data_node_config: Dat
             )
 
     def _check_required_properties(self, data_node_config_id: str, data_node_config: DataNodeConfig):
-        if storage_type := data_node_config.storage_type:
-            if storage_type in DataNodeConfig._REQUIRED_PROPERTIES:
-                required_properties = DataNodeConfig._REQUIRED_PROPERTIES[storage_type]
+        if not (storage_type := data_node_config.storage_type):
+            return
+        if storage_type in DataNodeConfig._REQUIRED_PROPERTIES:
+            required_properties = DataNodeConfig._REQUIRED_PROPERTIES[storage_type]
+            if data_node_config.properties:
                 if storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_SQL:
-                    if data_node_config.properties:
-                        if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
-                            if engine == DataNodeConfig._DB_ENGINE_SQLITE:
-                                required_properties = [
-                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
-                                ]
-                            else:
-                                required_properties = [
-                                    DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
-                                    DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
-                                ]
+                    if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
+                        required_properties = (
+                            [
+                                DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
+                            ]
+                            if engine == DataNodeConfig._DB_ENGINE_SQLITE
+                            else [
+                                DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
+                                DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
+                            ]
+                        )
+            if data_node_config.properties:
                 if storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_SQL_TABLE:
-                    if data_node_config.properties:
-                        if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
-                            if engine == DataNodeConfig._DB_ENGINE_SQLITE:
-                                required_properties = [
-                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
-                                ]
-                            else:
-                                required_properties = [
-                                    DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
-                                    DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
-                                ]
-            for required_property in required_properties:
-                if not data_node_config.properties or required_property not in data_node_config.properties:
-                    if data_node_config_id == DataNodeConfig._DEFAULT_KEY:
-                        self._warning(
-                            required_property,
-                            None,
-                            f"DataNodeConfig `{data_node_config_id}` is missing the required "
-                            f"property `{required_property}` for type `{storage_type}`.",
-                        )
+                    if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
+                        if engine == DataNodeConfig._DB_ENGINE_SQLITE:
+                            required_properties = [
+                                DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
+                            ]
                         else:
-                        self._error(
-                            required_property,
-                            None,
-                            f"DataNodeConfig `{data_node_config_id}` is missing the required "
-                            f"property `{required_property}` for type `{storage_type}`.",
-                        )
-
-    def _check_generic_read_write_fct_and_args(self, data_node_config_id: str, data_node_config: DataNodeConfig):
-        if data_node_config.storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC:
-            properties_to_check = [
-                DataNodeConfig._OPTIONAL_READ_FUNCTION_ARGS_GENERIC_PROPERTY,
-                DataNodeConfig._OPTIONAL_WRITE_FUNCTION_ARGS_GENERIC_PROPERTY,
-            ]
-            for prop_key in properties_to_check:
-                if data_node_config.properties and prop_key in data_node_config.properties:
-                    prop_value = data_node_config.properties[prop_key]
-                    if not isinstance(prop_value, list):
+                            required_properties = [
+                                DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
+                                DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
+                                DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
+                            ]
+        for required_property in required_properties:
+            if not data_node_config.properties or required_property not in data_node_config.properties:
+                if data_node_config_id == DataNodeConfig._DEFAULT_KEY:
+                    self._warning(
+                        required_property,
+                        None,
+                        f"DataNodeConfig `{data_node_config_id}` is missing the required "
+                        f"property `{required_property}` for type `{storage_type}`.",
+                    )
+                else:
                     self._error(
-                            prop_key,
-                            prop_value,
-                            f"`{prop_key}` field of DataNodeConfig"
-                            f" `{data_node_config_id}` must be populated with a List value.",
+                        required_property,
+                        None,
+                        f"DataNodeConfig `{data_node_config_id}` is missing the required "
+                        f"property `{required_property}` for type `{storage_type}`.",
                     )
-            if data_node_config_id != DataNodeConfig._DEFAULT_KEY:
-                properties_to_check_at_least_one = [
-                    DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY,
-                    DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY,
-                ]
-                has_at_least_one = False
-                for prop_key in properties_to_check_at_least_one:
-                    if data_node_config.properties and prop_key in data_node_config.properties:
-                        has_at_least_one = True
-                if not has_at_least_one:
+
+    def _check_generic_read_write_fct_and_args(self, data_node_config_id: str, data_node_config: DataNodeConfig):
+        if (
+            data_node_config.storage_type
+            != DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC
+        ):
+            return
+        properties_to_check = [
+            DataNodeConfig._OPTIONAL_READ_FUNCTION_ARGS_GENERIC_PROPERTY,
+            DataNodeConfig._OPTIONAL_WRITE_FUNCTION_ARGS_GENERIC_PROPERTY,
+        ]
+        for prop_key in properties_to_check:
+            if data_node_config.properties and prop_key in data_node_config.properties:
+                prop_value = data_node_config.properties[prop_key]
+                if not isinstance(prop_value, list):
                     self._error(
-                        ", ".join(properties_to_check_at_least_one),
-                        None,
-                        f"Either `{DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY}` field or "
-                        f"`{DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY}` field of "
-                        f"DataNodeConfig `{data_node_config_id}` must be populated with a Callable function.",
+                        prop_key,
+                        prop_value,
+                        f"`{prop_key}` field of DataNodeConfig"
+                        f" `{data_node_config_id}` must be populated with a List value.",
                     )
+        if data_node_config_id != DataNodeConfig._DEFAULT_KEY:
+            properties_to_check_at_least_one = [
+                DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY,
+                DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY,
+            ]
+            has_at_least_one = any(
+                data_node_config.properties
+                and prop_key in data_node_config.properties
+                for prop_key in properties_to_check_at_least_one
+            )
+            if not has_at_least_one:
+                self._error(
+                    ", ".join(properties_to_check_at_least_one),
+                    None,
+                    f"Either `{DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY}` field or "
+                    f"`{DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY}` field of "
+                    f"DataNodeConfig `{data_node_config_id}` must be populated with a Callable function.",
+                )
 
     def _check_callable(self, data_node_config_id: str, data_node_config: DataNodeConfig):
         properties_to_check = {
@@ -168,7 +175,7 @@ def _check_callable(self, data_node_config_id: str, data_node_config: DataNodeCo
             ],
         }
 
-        if data_node_config.storage_type in properties_to_check.keys():
+        if data_node_config.storage_type in properties_to_check:
             for prop_key in properties_to_check[data_node_config.storage_type]:
                 prop_value = data_node_config.properties.get(prop_key) if data_node_config.properties else None
                 if prop_value and not callable(prop_value):
diff --git a/taipy/core/config/checkers/_scenario_config_checker.py b/taipy/core/config/checkers/_scenario_config_checker.py
index 981363e59b..23108dc064 100644
--- a/taipy/core/config/checkers/_scenario_config_checker.py
+++ b/taipy/core/config/checkers/_scenario_config_checker.py
@@ -115,10 +115,11 @@ def _check_additional_dns_not_overlapping_tasks_dns(self, scenario_config_id: st
     def _check_tasks_in_sequences_exist_in_scenario_tasks(
         self, scenario_config_id: str, scenario_config: ScenarioConfig
     ):
-        scenario_task_ids = set()
-        for task_config in scenario_config.tasks:
-            if isinstance(task_config, TaskConfig):
-                scenario_task_ids.add(task_config.id)
+        scenario_task_ids = {
+            task_config.id
+            for task_config in scenario_config.tasks
+            if isinstance(task_config, TaskConfig)
+        }
         for sequence_tasks in scenario_config.sequences.values():
             self._check_children(
                 ScenarioConfig,
diff --git a/taipy/core/config/checkers/_task_config_checker.py b/taipy/core/config/checkers/_task_config_checker.py
index a4f8b4c483..dae13a4524 100644
--- a/taipy/core/config/checkers/_task_config_checker.py
+++ b/taipy/core/config/checkers/_task_config_checker.py
@@ -49,11 +49,10 @@ def _check_existing_function(self, task_config_id: str, task_config: TaskConfig)
                 task_config.function,
                 f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` is empty.",
             )
-        else:
-            if not callable(task_config.function):
-                self._error(
-                    task_config._FUNCTION,
-                    task_config.function,
-                    f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` must be"
-                    f" populated with Callable value.",
-                )
+        elif not callable(task_config.function):
+            self._error(
+                task_config._FUNCTION,
+                task_config.function,
+                f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` must be"
+                f" populated with Callable value.",
+            )
diff --git a/taipy/core/config/data_node_config.py b/taipy/core/config/data_node_config.py
index c4f493dc84..dcf5ef74c3 100644
--- a/taipy/core/config/data_node_config.py
+++ b/taipy/core/config/data_node_config.py
@@ -299,10 +299,7 @@ def validity_period(self, val):
     def cacheable(self):
         _warn_deprecated("cacheable", suggest="the skippable feature")
         cacheable = self._properties.get("cacheable")
-        if cacheable is not None:
-            return _tpl._replace_templates(cacheable)
-        else:
-            return False
+        return _tpl._replace_templates(cacheable) if cacheable is not None else False
 
     @cacheable.setter  # type: ignore
     @_ConfigBlocker._check()
@@ -454,20 +451,20 @@ def _configure(
 
         Returns:
             The new data node configuration.
""" - configuration_map: Dict[str, Callable] = { - cls._STORAGE_TYPE_VALUE_PICKLE: cls._configure_pickle, - cls._STORAGE_TYPE_VALUE_SQL_TABLE: cls._configure_sql_table, - cls._STORAGE_TYPE_VALUE_SQL: cls._configure_sql, - cls._STORAGE_TYPE_VALUE_MONGO_COLLECTION: cls._configure_mongo_collection, - cls._STORAGE_TYPE_VALUE_CSV: cls._configure_csv, - cls._STORAGE_TYPE_VALUE_EXCEL: cls._configure_excel, - cls._STORAGE_TYPE_VALUE_IN_MEMORY: cls._configure_in_memory, - cls._STORAGE_TYPE_VALUE_GENERIC: cls._configure_generic, - cls._STORAGE_TYPE_VALUE_JSON: cls._configure_json, - cls._STORAGE_TYPE_VALUE_PARQUET: cls._configure_parquet, - } - if storage_type in cls._ALL_STORAGE_TYPES: + configuration_map: Dict[str, Callable] = { + cls._STORAGE_TYPE_VALUE_PICKLE: cls._configure_pickle, + cls._STORAGE_TYPE_VALUE_SQL_TABLE: cls._configure_sql_table, + cls._STORAGE_TYPE_VALUE_SQL: cls._configure_sql, + cls._STORAGE_TYPE_VALUE_MONGO_COLLECTION: cls._configure_mongo_collection, + cls._STORAGE_TYPE_VALUE_CSV: cls._configure_csv, + cls._STORAGE_TYPE_VALUE_EXCEL: cls._configure_excel, + cls._STORAGE_TYPE_VALUE_IN_MEMORY: cls._configure_in_memory, + cls._STORAGE_TYPE_VALUE_GENERIC: cls._configure_generic, + cls._STORAGE_TYPE_VALUE_JSON: cls._configure_json, + cls._STORAGE_TYPE_VALUE_PARQUET: cls._configure_parquet, + } + return configuration_map[storage_type](id=id, scope=scope, validity_period=validity_period, **properties) return cls.__configure(id, storage_type, scope, validity_period, **properties) diff --git a/taipy/core/config/job_config.py b/taipy/core/config/job_config.py index d8607979a3..c5e02c0c23 100644 --- a/taipy/core/config/job_config.py +++ b/taipy/core/config/job_config.py @@ -66,8 +66,7 @@ def _to_dict(self): @classmethod def _from_dict(cls, config_as_dict: Dict[str, Any], id=None, config: Optional[_Config] = None): mode = config_as_dict.pop(cls._MODE_KEY, None) - job_config = JobConfig(mode, **config_as_dict) - return job_config + return JobConfig(mode, **config_as_dict) def _update(self, as_dict: Dict[str, Any], default_section=None): mode = _tpl._replace_templates(as_dict.pop(self._MODE_KEY, self.mode)) diff --git a/taipy/core/config/scenario_config.py b/taipy/core/config/scenario_config.py index 2add956c1e..e6e161275a 100644 --- a/taipy/core/config/scenario_config.py +++ b/taipy/core/config/scenario_config.py @@ -90,7 +90,7 @@ def __init__( def __copy__(self): comp = None if self.comparators is None else self.comparators - scenario_config = ScenarioConfig( + return ScenarioConfig( self.id, copy(self._tasks), copy(self._additional_data_nodes), @@ -99,7 +99,6 @@ def __copy__(self): copy(self.sequences), **copy(self._properties), ) - return scenario_config def __getattr__(self, item: str) -> Optional[Any]: return _tpl._replace_templates(self._properties.get(item)) @@ -138,11 +137,11 @@ def __get_all_unique_data_nodes(self) -> List[DataNodeConfig]: @classmethod def default_config(cls): - return ScenarioConfig(cls._DEFAULT_KEY, list(), list(), None, dict()) + return ScenarioConfig(cls._DEFAULT_KEY, [], [], None, dict()) def _clean(self): - self._tasks = list() - self._additional_data_nodes = list() + self._tasks = [] + self._additional_data_nodes = [] self.frequency = None self.comparators = dict() self.sequences = dict() @@ -164,9 +163,9 @@ def _from_dict( ) -> "ScenarioConfig": # type: ignore as_dict.pop(cls._ID_KEY, id) - tasks = cls.__get_task_configs(as_dict.pop(cls._TASKS_KEY, list()), config) + tasks = cls.__get_task_configs(as_dict.pop(cls._TASKS_KEY, []), config) - 
additional_data_node_ids = as_dict.pop(cls._ADDITIONAL_DATA_NODES_KEY, list()) + additional_data_node_ids = as_dict.pop(cls._ADDITIONAL_DATA_NODES_KEY, []) additional_data_nodes = cls.__get_additional_data_node_configs(additional_data_node_ids, config) frequency = as_dict.pop(cls._FREQUENCY_KEY, None) @@ -176,7 +175,7 @@ def _from_dict( for sequence_name, sequence_tasks in sequences.items(): sequences[sequence_name] = cls.__get_task_configs(sequence_tasks, config) - scenario_config = ScenarioConfig( + return ScenarioConfig( id=id, tasks=tasks, additional_data_nodes=additional_data_nodes, @@ -186,8 +185,6 @@ def _from_dict( **as_dict, ) - return scenario_config - @staticmethod def __get_task_configs(task_config_ids: List[str], config: Optional[_Config]): task_configs = set() diff --git a/taipy/core/cycle/_cycle_manager_factory.py b/taipy/core/cycle/_cycle_manager_factory.py index 04673b0c3b..865573c69a 100644 --- a/taipy/core/cycle/_cycle_manager_factory.py +++ b/taipy/core/cycle/_cycle_manager_factory.py @@ -26,11 +26,13 @@ class _CycleManagerFactory(_ManagerFactory): def _build_manager(cls) -> Type[_CycleManager]: # type: ignore if cls._using_enterprise(): cycle_manager = _load_fct( - cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager", "_CycleManager" - ) # type: ignore + f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.cycle._cycle_manager", + "_CycleManager", + ) build_repository = _load_fct( - cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager_factory", "_CycleManagerFactory" - )._build_repository # type: ignore + f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.cycle._cycle_manager_factory", + "_CycleManagerFactory", + )._build_repository else: cycle_manager = _CycleManager build_repository = cls._build_repository diff --git a/taipy/core/cycle/cycle.py b/taipy/core/cycle/cycle.py index 102a197a9e..2dc2f300f7 100644 --- a/taipy/core/cycle/cycle.py +++ b/taipy/core/cycle/cycle.py @@ -141,10 +141,10 @@ def _get_valid_filename(name: str) -> str: """ Source: https://github.com/django/django/blob/main/django/utils/text.py """ - s = str(name).strip().replace(" ", "_") + s = name.strip().replace(" ", "_") s = re.sub(r"(?u)[^-\w.]", "", s) if s in {"", ".", ".."}: - raise _SuspiciousFileOperation("Could not derive file name from '%s'" % name) + raise _SuspiciousFileOperation(f"Could not derive file name from '{name}'") s = str(s).strip().replace(" ", "_") return re.sub(r"(?u)[^-\w.]", "", s) @@ -154,7 +154,9 @@ def __getattr__(self, attribute_name): protected_attribute_name = attribute_name if protected_attribute_name in self._properties: return self._properties[protected_attribute_name] - raise AttributeError(f"{attribute_name} is not an attribute of cycle {self.id}") + raise AttributeError( + f"{protected_attribute_name} is not an attribute of cycle {self.id}" + ) def __eq__(self, other): return self.id == other.id diff --git a/taipy/core/data/_abstract_sql.py b/taipy/core/data/_abstract_sql.py index b3015f734f..6e3d741aad 100644 --- a/taipy/core/data/_abstract_sql.py +++ b/taipy/core/data/_abstract_sql.py @@ -152,7 +152,7 @@ def _check_required_properties(self, properties: Dict): if missing := set(required) - set(properties.keys()): raise MissingRequiredProperty( - f"The following properties " f"{', '.join(x for x in missing)} were not informed and are required." + f"The following properties {', '.join(missing)} were not informed and are required." 
) def _get_engine(self): diff --git a/taipy/core/data/_data_converter.py b/taipy/core/data/_data_converter.py index 0744020672..3eeb65eeb8 100644 --- a/taipy/core/data/_data_converter.py +++ b/taipy/core/data/_data_converter.py @@ -87,7 +87,10 @@ def __serialize_sql_dn_properties(cls, datanode_properties: dict) -> dict: @classmethod def __serialize_mongo_collection_dn_model_properties(cls, datanode_properties: dict) -> dict: - if MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY in datanode_properties.keys(): + if ( + MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY + in datanode_properties + ): datanode_properties[MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY] = ( f"{datanode_properties[MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY].__module__}." f"{datanode_properties[MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY].__qualname__}" @@ -242,7 +245,10 @@ def __deserialize_sql_dn_model_properties(cls, datanode_model_properties: dict) @classmethod def __deserialize_mongo_collection_dn_model_properties(cls, datanode_model_properties: dict) -> dict: - if MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY in datanode_model_properties.keys(): + if ( + MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY + in datanode_model_properties + ): if isinstance(datanode_model_properties[MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY], str): datanode_model_properties[MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY] = locate( datanode_model_properties[MongoCollectionDataNode._CUSTOM_DOCUMENT_PROPERTY] diff --git a/taipy/core/data/_data_manager.py b/taipy/core/data/_data_manager.py index ee5fc885de..c1bbeedfb5 100644 --- a/taipy/core/data/_data_manager.py +++ b/taipy/core/data/_data_manager.py @@ -136,8 +136,7 @@ def _remove_dn_file_paths_in_backup_file(cls, data_nodes: Iterable[DataNode]): @classmethod def _delete(cls, data_node_id: DataNodeId): - data_node = cls._get(data_node_id, None) - if data_node: + if data_node := cls._get(data_node_id, None): cls._clean_pickle_file(data_node) cls._remove_dn_file_path_in_backup_file(data_node) super()._delete(data_node_id) diff --git a/taipy/core/data/_data_manager_factory.py b/taipy/core/data/_data_manager_factory.py index 8da25bd04d..1212c2b6c1 100644 --- a/taipy/core/data/_data_manager_factory.py +++ b/taipy/core/data/_data_manager_factory.py @@ -26,11 +26,13 @@ class _DataManagerFactory(_ManagerFactory): def _build_manager(cls) -> Type[_DataManager]: # type: ignore if cls._using_enterprise(): data_manager = _load_fct( - cls._TAIPY_ENTERPRISE_CORE_MODULE + ".data._data_manager", "_DataManager" - ) # type: ignore + f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.data._data_manager", + "_DataManager", + ) build_repository = _load_fct( - cls._TAIPY_ENTERPRISE_CORE_MODULE + ".data._data_manager_factory", "_DataManagerFactory" - )._build_repository # type: ignore + f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.data._data_manager_factory", + "_DataManagerFactory", + )._build_repository else: data_manager = _DataManager build_repository = cls._build_repository diff --git a/taipy/core/data/_filter.py b/taipy/core/data/_filter.py index bcadba90b2..05bb954df2 100644 --- a/taipy/core/data/_filter.py +++ b/taipy/core/data/_filter.py @@ -26,14 +26,19 @@ class _FilterDataNode: @staticmethod def __is_pandas_object(data) -> bool: - return isinstance(data, (pd.DataFrame, modin_pd.DataFrame)) or isinstance(data, (pd.Series, modin_pd.DataFrame)) + return isinstance( + data, (pd.DataFrame, modin_pd.DataFrame, pd.Series, modin_pd.DataFrame) + ) @staticmethod def __is_multi_sheet_excel(data) 
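+        # A single isinstance call with a tuple of types covers pandas and Modin, DataFrame and Series alike.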
+        return isinstance(
+            data, (pd.DataFrame, modin_pd.DataFrame, pd.Series, modin_pd.Series)
+        )

     @staticmethod
     def __is_multi_sheet_excel(data) -> bool:
         if isinstance(data, Dict):
-            has_df_children = all([isinstance(e, (pd.DataFrame, modin_pd.DataFrame)) for e in data.values()])
-            has_list_children = all([isinstance(e, List) for e in data.values()])
-            has_np_array_children = all([isinstance(e, np.ndarray) for e in data.values()])
+            has_df_children = all(
+                isinstance(e, (pd.DataFrame, modin_pd.DataFrame))
+                for e in data.values()
+            )
+            has_list_children = all(isinstance(e, List) for e in data.values())
+            has_np_array_children = all(isinstance(e, np.ndarray) for e in data.values())
             return has_df_children or has_list_children or has_np_array_children
         return False

@@ -82,7 +87,7 @@ def __getitem_dataframe(data, key: Union[pd.DataFrame, modin_pd.DataFrame]):
         if _FilterDataNode.__is_pandas_object(data):
             return data[key]
         if _FilterDataNode.__is_list_of_dict(data):
-            filtered_data = list()
+            filtered_data = []
             for i, row in key.iterrows():
                 filtered_row = dict()
                 for col in row.index:
@@ -101,10 +106,10 @@ def __getitem_bool_indexer(data, key):
     def __getitem_iterable(data, keys):
         if _FilterDataNode.__is_pandas_object(data):
             return data[keys]
-        filtered_data = []
-        for entry in data:
-            filtered_data.append({k: getattr(entry, k) for k in keys if hasattr(entry, k)})
-        return filtered_data
+        return [
+            {k: getattr(entry, k) for k in keys if hasattr(entry, k)}
+            for entry in data
+        ]

     @staticmethod
     def _filter(data, operators: Union[List, Tuple], join_operator=JoinOperator.AND):
@@ -114,7 +119,7 @@ def _filter(data, operators: Union[List, Tuple], join_operator=JoinOperator.AND)
         if isinstance(data, Dict):
             return {k: _FilterDataNode._filter(v, operators, join_operator) for k, v in data.items()}

-        if not ((isinstance(operators[0], list)) or (isinstance(operators[0], tuple))):
+        if not (isinstance(operators[0], (list, tuple))):
             if isinstance(data, (pd.DataFrame, modin_pd.DataFrame)):
                 return _FilterDataNode.__filter_dataframe_per_key_value(data, operators[0], operators[1], operators[2])
             if isinstance(data, np.ndarray):
@@ -135,20 +140,23 @@ def _filter(data, operators: Union[List, Tuple], join_operator=JoinOperator.AND)
     def __filter_dataframe(
         df_data: Union[pd.DataFrame, modin_pd.DataFrame], operators: Union[List, Tuple], join_operator=JoinOperator.AND
     ):
-        filtered_df_data = []
         if join_operator == JoinOperator.AND:
             how = "inner"
         elif join_operator == JoinOperator.OR:
             how = "outer"
         else:
             return NotImplementedError

-        for key, value, operator in operators:
-            filtered_df_data.append(_FilterDataNode.__filter_dataframe_per_key_value(df_data, key, value, operator))
-
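+        # JoinOperator.AND was mapped to an inner merge (rows matching all conditions), OR to an outer merge (rows matching any).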
+        filtered_df_data = [
+            _FilterDataNode.__filter_dataframe_per_key_value(
+                df_data, key, value, operator
+            )
+            for key, value, operator in operators
+        ]
         if isinstance(df_data, modin_pd.DataFrame):
             if filtered_df_data:
                 return _FilterDataNode.__modin_dataframe_merge(filtered_df_data, how)
-            return modin_pd.DataFrame()
+            else:
+                return modin_pd.DataFrame()
         return _FilterDataNode.__dataframe_merge(filtered_df_data, how) if filtered_df_data else pd.DataFrame()

@@ -181,10 +189,12 @@ def __modin_dataframe_merge(df_list: List, how="inner"):

     @staticmethod
     def __filter_numpy_array(data: np.ndarray, operators: Union[List, Tuple], join_operator=JoinOperator.AND):
-        conditions = []
-        for key, value, operator in operators:
-            conditions.append(_FilterDataNode.__get_filter_condition_per_key_value(data, key, value, operator))
-
+        conditions = [
+            _FilterDataNode.__get_filter_condition_per_key_value(
+                data, key, value, operator
+            )
+            for key, value, operator in operators
+        ]
         if join_operator == JoinOperator.AND:
             join_conditions = reduce(and_, conditions)
         elif join_operator == JoinOperator.OR:
@@ -216,10 +226,13 @@ def __get_filter_condition_per_key_value(array_data: np.ndarray, key, value, ope

     @staticmethod
     def __filter_list(list_data: List, operators: Union[List, Tuple], join_operator=JoinOperator.AND):
-        filtered_list_data = []
-        for key, value, operator in operators:
-            filtered_list_data.append(_FilterDataNode.__filter_list_per_key_value(list_data, key, value, operator))
-        if len(filtered_list_data) == 0:
+        filtered_list_data = [
+            _FilterDataNode.__filter_list_per_key_value(
+                list_data, key, value, operator
+            )
+            for key, value, operator in operators
+        ]
+        if not filtered_list_data:
             return filtered_list_data
         if join_operator == JoinOperator.AND:
             return _FilterDataNode.__list_intersect(filtered_list_data)
diff --git a/taipy/core/data/csv.py b/taipy/core/data/csv.py
index dd32bcfee9..a0cd804fc8 100644
--- a/taipy/core/data/csv.py
+++ b/taipy/core/data/csv.py
@@ -183,17 +183,15 @@ def _read(self):
     def _read_as(self):
         custom_class = self.properties[self.__EXPOSED_TYPE_PROPERTY]
         with open(self._path, encoding=self.properties[self.__ENCODING_KEY]) as csvFile:
-            res = list()
+            res = []
             if self.properties[self.__HAS_HEADER_PROPERTY]:
                 reader = csv.DictReader(csvFile)
-                for line in reader:
-                    res.append(custom_class(**line))
+                res.extend(custom_class(**line) for line in reader)
             else:
                 reader = csv.reader(
                     csvFile,
                 )
-                for line in reader:
-                    res.append(custom_class(*line))
+                res.extend(custom_class(*line) for line in reader)
         return res

     def _read_as_numpy(self) -> np.ndarray:
@@ -204,15 +202,20 @@ def _read_as_pandas_dataframe(
     ) -> pd.DataFrame:
         try:
             if self.properties[self.__HAS_HEADER_PROPERTY]:
-                if column_names:
-                    return pd.read_csv(self._path, encoding=self.properties[self.__ENCODING_KEY])[column_names]
-                return pd.read_csv(self._path, encoding=self.properties[self.__ENCODING_KEY])
-            else:
-                if usecols:
-                    return pd.read_csv(
-                        self._path, encoding=self.properties[self.__ENCODING_KEY], header=None, usecols=usecols
+                return (
+                    pd.read_csv(
+                        self._path, encoding=self.properties[self.__ENCODING_KEY]
+                    )[column_names]
+                    if column_names
+                    else pd.read_csv(
+                        self._path, encoding=self.properties[self.__ENCODING_KEY]
                     )
-                return pd.read_csv(self._path, encoding=self.properties[self.__ENCODING_KEY], header=None)
+                )
+            if usecols:
+                return pd.read_csv(
+                    self._path, encoding=self.properties[self.__ENCODING_KEY], header=None, usecols=usecols
+                )
+            return pd.read_csv(self._path, encoding=self.properties[self.__ENCODING_KEY], header=None)
         except pd.errors.EmptyDataError:
             return pd.DataFrame()

@@ -221,15 +224,20 @@ def _read_as_modin_dataframe(
     ) -> modin_pd.DataFrame:
         try:
             if self.properties[self.__HAS_HEADER_PROPERTY]:
-                if column_names:
-                    return modin_pd.read_csv(self._path, encoding=self.properties[self.__ENCODING_KEY])[column_names]
-                return modin_pd.read_csv(self._path, encoding=self.properties[self.__ENCODING_KEY])
-            else:
-                if usecols:
-                    return modin_pd.read_csv(
-                        self._path, header=None, usecols=usecols, encoding=self.properties[self.__ENCODING_KEY]
+                return (
+                    modin_pd.read_csv(
+                        self._path, encoding=self.properties[self.__ENCODING_KEY]
+                    )[column_names]
+                    if column_names
+                    else modin_pd.read_csv(
+                        self._path, encoding=self.properties[self.__ENCODING_KEY]
                    )
-                return modin_pd.read_csv(self._path, header=None, encoding=self.properties[self.__ENCODING_KEY])
+                )
+            if usecols:
+                return modin_pd.read_csv(
+                    self._path, header=None, usecols=usecols, encoding=self.properties[self.__ENCODING_KEY]
+                )
+            return modin_pd.read_csv(self._path, header=None, encoding=self.properties[self.__ENCODING_KEY])
         except pd.errors.EmptyDataError:
             return modin_pd.DataFrame()

@@ -255,9 +263,6 @@ def write_with_column_names(self, data: Any, columns: Optional[List[str]] = None
             columns (Optional[List[str]]): The list of column names to write.
             job_id (JobId^): An optional identifier of the writer.
         """
-        if not columns:
-            df = pd.DataFrame(data)
-        else:
-            df = pd.DataFrame(data, columns=columns)
+        df = pd.DataFrame(data) if not columns else pd.DataFrame(data, columns=columns)
         df.to_csv(self._path, index=False, encoding=self.properties[self.__ENCODING_KEY])
         self.track_edit(timestamp=datetime.now(), job_id=job_id)
diff --git a/taipy/core/data/data_node.py b/taipy/core/data/data_node.py
index a5ece4a37f..4eb44cf09a 100644
--- a/taipy/core/data/data_node.py
+++ b/taipy/core/data/data_node.py
@@ -116,7 +116,7 @@ def __init__(
         self._editor_expiration_date: Optional[datetime] = editor_expiration_date

         # Track edits
-        self._edits = edits or list()
+        self._edits = edits or []

         self._properties = _Properties(self, **kwargs)

@@ -144,9 +144,7 @@ def get_last_edit(self) -> Optional[Edit]:
         Returns:
             None if there has been no `Edit^` on this data node.
         """
-        if self._edits:
-            return self._edits[-1]
-        return None
+        return self._edits[-1] if self._edits else None

     @property  # type: ignore
     @_self_reload(_MANAGER_NAME)
@@ -186,12 +184,11 @@ def validity_period(self, val):
     @_self_reload(_MANAGER_NAME)
     def expiration_date(self) -> datetime:
         """Datetime instant of the expiration date of this data node."""
-        last_edit_date = self.last_edit_date
-        validity_period = self._validity_period
-
-        if not last_edit_date:
+        if not (last_edit_date := self.last_edit_date):
             raise NoData(f"Data node {self.id} from config {self.config_id} has not been written yet.")

+        validity_period = self._validity_period
+
         return last_edit_date + validity_period if validity_period else last_edit_date

     @property  # type: ignore
@@ -371,10 +368,7 @@ def track_edit(self, **options):
             options (dict[str, any)): track `timestamp`, `comments`, `job_id`. The others are user-custom, users can
                 use options to attach any information to an external edit of a data node.
         """
-        edit = {}
-        for k, v in options.items():
-            if v is not None:
-                edit[k] = v
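+        # Keep only the options the caller actually set; None values would otherwise end up in the edit record.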
""" - if self._edit_in_progress: - return False - if not self._last_edit_date: - # Never been written so it is not up-to-date - return False - return True + return False if self._edit_in_progress else bool(self._last_edit_date) @property # type: ignore @_self_reload(_MANAGER_NAME) @@ -493,10 +481,7 @@ def is_valid(self) -> bool: if not self._validity_period: # No validity period and has already been written, so it is valid return True - if datetime.now() > self.expiration_date: - # expiration_date has been passed - return False - return True + return datetime.now() <= self.expiration_date @property def is_up_to_date(self) -> bool: diff --git a/taipy/core/data/excel.py b/taipy/core/data/excel.py index 1c7c945df4..aaf9426529 100644 --- a/taipy/core/data/excel.py +++ b/taipy/core/data/excel.py @@ -239,9 +239,7 @@ def _read_as(self): work_books[sheet_name] = self._read_as_pandas_dataframe(sheet_name) continue - res = list() - for row in work_sheet.rows: - res.append([col.value for col in row]) + res = [[col.value for col in row] for row in work_sheet.rows] if self.properties[self.__HAS_HEADER_PROPERTY] and res: header = res.pop(0) for i, row in enumerate(res): @@ -342,7 +340,8 @@ def __append_excel_with_multiple_sheets(self, data: Any, columns: List[str] = No def _append(self, data: Any): if isinstance(data, Dict) and all( - [isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) for x in data.values()] + isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) + for x in data.values() ): self.__append_excel_with_multiple_sheets(data) elif isinstance(data, (pd.DataFrame, modin_pd.DataFrame)): @@ -351,8 +350,7 @@ def _append(self, data: Any): self.__append_excel_with_single_sheet(pd.DataFrame(data).to_excel, index=False, header=False) def __write_excel_with_single_sheet(self, write_excel_fct, *args, **kwargs): - sheet_name = self.properties.get(self.__SHEET_NAME_PROPERTY) - if sheet_name: + if sheet_name := self.properties.get(self.__SHEET_NAME_PROPERTY): if not isinstance(sheet_name, str): if len(sheet_name) > 1: raise SheetNameLengthMismatch @@ -378,7 +376,8 @@ def __write_excel_with_multiple_sheets(self, data: Any, columns: List[str] = Non def _write(self, data: Any): if isinstance(data, Dict) and all( - [isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) for x in data.values()] + isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) + for x in data.values() ): self.__write_excel_with_multiple_sheets(data) elif isinstance(data, (pd.DataFrame, modin_pd.DataFrame)): @@ -395,7 +394,8 @@ def write_with_column_names(self, data: Any, columns: List[str] = None, job_id: job_id (JobId^): An optional identifier of the writer. """ if isinstance(data, Dict) and all( - [isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) for x in data.values()] + isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) + for x in data.values() ): self.__write_excel_with_multiple_sheets(data, columns=columns) else: diff --git a/taipy/core/data/generic.py b/taipy/core/data/generic.py index 2983b26d5e..a2d3ba4070 100644 --- a/taipy/core/data/generic.py +++ b/taipy/core/data/generic.py @@ -81,14 +81,13 @@ def __init__( properties = {} if missing := set(self._REQUIRED_PROPERTIES) - set(properties.keys()): raise MissingRequiredProperty( - f"The following properties " f"{', '.join(x for x in missing)} were not informed and are required." + f"The following properties {', '.join(missing)} were not informed and are required." 
) missing_optional_fcts = set(self._REQUIRED_AT_LEAST_ONE_PROPERTY) - set(properties.keys()) if len(missing_optional_fcts) == len(self._REQUIRED_AT_LEAST_ONE_PROPERTY): raise MissingRequiredProperty( - f"None of the following properties " - f"{', '.join(x for x in missing)} were informed and at least one must be populated." + f"None of the following properties {', '.join(missing)} were informed and at least one must be populated." ) for missing_optional_fct in missing_optional_fcts: properties[missing_optional_fct] = None diff --git a/taipy/core/data/mongo.py b/taipy/core/data/mongo.py index c5abd03046..80d1c4807e 100644 --- a/taipy/core/data/mongo.py +++ b/taipy/core/data/mongo.py @@ -104,7 +104,7 @@ def __init__( required = self._REQUIRED_PROPERTIES if missing := set(required) - set(properties.keys()): raise MissingRequiredProperty( - f"The following properties " f"{', '.join(x for x in missing)} were not informed and are required." + f"The following properties {', '.join(missing)} were not informed and are required." ) self._check_custom_document(properties[self._CUSTOM_DOCUMENT_PROPERTY]) diff --git a/taipy/core/exceptions/exceptions.py b/taipy/core/exceptions/exceptions.py index 30d2e8e0c8..3b94eff1da 100644 --- a/taipy/core/exceptions/exceptions.py +++ b/taipy/core/exceptions/exceptions.py @@ -85,7 +85,7 @@ class DataNodeIsBeingEdited(Exception): """Raised if a DataNode is being edited.""" def __init__(self, data_node_id: str, editor_id: Optional[str] = None): - self.message = f"DataNode {data_node_id} is being edited{ ' by ' + editor_id if editor_id else ''}." + self.message = f"DataNode {data_node_id} is being edited{f' by {editor_id}' if editor_id else ''}." class NonExistingDataNodeConfig(Exception): diff --git a/taipy/core/job/_job_manager.py b/taipy/core/job/_job_manager.py index 91f3151aa4..12df64135e 100644 --- a/taipy/core/job/_job_manager.py +++ b/taipy/core/job/_job_manager.py @@ -77,18 +77,13 @@ def _cancel(cls, job: Union[str, Job]): @classmethod def _get_latest(cls, task: Task) -> Optional[Job]: - jobs_of_task = list(filter(lambda job: task in job, cls._get_all())) - if len(jobs_of_task) == 0: - return None - if len(jobs_of_task) == 1: - return jobs_of_task[0] + if jobs_of_task := list(filter(lambda job: task in job, cls._get_all())): + return jobs_of_task[0] if len(jobs_of_task) == 1 else max(jobs_of_task) else: - return max(jobs_of_task) + return None @classmethod def _is_deletable(cls, job: Union[Job, JobId]) -> bool: if isinstance(job, str): job = cls._get(job) - if job.is_finished(): - return True - return False + return bool(job.is_finished()) diff --git a/taipy/core/job/_job_manager_factory.py b/taipy/core/job/_job_manager_factory.py index 5f1cd8dd69..1a8c3cdf00 100644 --- a/taipy/core/job/_job_manager_factory.py +++ b/taipy/core/job/_job_manager_factory.py @@ -26,11 +26,13 @@ class _JobManagerFactory(_ManagerFactory): def _build_manager(cls) -> Type[_JobManager]: # type: ignore if cls._using_enterprise(): job_manager = _load_fct( - cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager", "_JobManager" - ) # type: ignore + f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.job._job_manager", + "_JobManager", + ) build_repository = _load_fct( - cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager_factory", "_JobManagerFactory" - )._build_repository # type: ignore + f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.job._job_manager_factory", + "_JobManagerFactory", + )._build_repository else: job_manager = _JobManager build_repository = cls._build_repository diff --git 
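+        # max() relies on Job's ordering to pick the most recent job when several exist.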
+        if jobs_of_task := list(filter(lambda job: task in job, cls._get_all())):
+            return jobs_of_task[0] if len(jobs_of_task) == 1 else max(jobs_of_task)
         else:
-            return max(jobs_of_task)
+            return None

     @classmethod
     def _is_deletable(cls, job: Union[Job, JobId]) -> bool:
         if isinstance(job, str):
             job = cls._get(job)
-        if job.is_finished():
-            return True
-        return False
+        return bool(job.is_finished())
diff --git a/taipy/core/job/_job_manager_factory.py b/taipy/core/job/_job_manager_factory.py
index 5f1cd8dd69..1a8c3cdf00 100644
--- a/taipy/core/job/_job_manager_factory.py
+++ b/taipy/core/job/_job_manager_factory.py
@@ -26,11 +26,13 @@ class _JobManagerFactory(_ManagerFactory):
     def _build_manager(cls) -> Type[_JobManager]:  # type: ignore
         if cls._using_enterprise():
             job_manager = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager", "_JobManager"
-            )  # type: ignore
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.job._job_manager",
+                "_JobManager",
+            )
             build_repository = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager_factory", "_JobManagerFactory"
-            )._build_repository  # type: ignore
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.job._job_manager_factory",
+                "_JobManagerFactory",
+            )._build_repository
         else:
             job_manager = _JobManager
             build_repository = cls._build_repository
diff --git a/taipy/core/notification/_topic.py b/taipy/core/notification/_topic.py
index a7a4f073e5..24c95546b8 100644
--- a/taipy/core/notification/_topic.py
+++ b/taipy/core/notification/_topic.py
@@ -55,11 +55,9 @@ def __hash__(self):
         return hash((self.entity_type, self.entity_id, self.operation, self.attribute_name))

     def __eq__(self, __value) -> bool:
-        if (
+        return (
             self.entity_type == __value.entity_type
             and self.entity_id == __value.entity_id
             and self.operation == __value.operation
             and self.attribute_name == __value.attribute_name
-        ):
-            return True
-        return False
+        )
diff --git a/taipy/core/notification/event.py b/taipy/core/notification/event.py
index 92940c6292..78dc0f2b44 100644
--- a/taipy/core/notification/event.py
+++ b/taipy/core/notification/event.py
@@ -49,7 +49,11 @@ class EventEntityType(_ReprEnum):
     SUBMISSION = 7

-_NO_ATTRIBUTE_NAME_OPERATIONS = set([EventOperation.CREATION, EventOperation.DELETION, EventOperation.SUBMISSION])
+_NO_ATTRIBUTE_NAME_OPERATIONS = {
+    EventOperation.CREATION,
+    EventOperation.DELETION,
+    EventOperation.SUBMISSION,
+}
 _UNSUBMITTABLE_ENTITY_TYPES = (
     EventEntityType.CYCLE,
     EventEntityType.DATA_NODE,
diff --git a/taipy/core/notification/notifier.py b/taipy/core/notification/notifier.py
index 25557bd6c7..24b27dc2cc 100644
--- a/taipy/core/notification/notifier.py
+++ b/taipy/core/notification/notifier.py
@@ -156,6 +156,8 @@ def _is_matching(event: Event, topic: _Topic) -> bool:
             return False
         if topic.operation is not None and event.operation != topic.operation:
             return False
-        if topic.attribute_name is not None and event.attribute_name and event.attribute_name != topic.attribute_name:
-            return False
-        return True
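+        # A topic without an attribute filter, or an event without an attribute name, matches unconditionally.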
+        return (
+            topic.attribute_name is None
+            or not event.attribute_name
+            or event.attribute_name == topic.attribute_name
+        )
diff --git a/taipy/core/scenario/_scenario_converter.py b/taipy/core/scenario/_scenario_converter.py
index 8fe52a2a7f..d2e9797e5c 100644
--- a/taipy/core/scenario/_scenario_converter.py
+++ b/taipy/core/scenario/_scenario_converter.py
@@ -26,16 +26,21 @@ class _ScenarioConverter(_AbstractConverter):
     @classmethod
     def _entity_to_model(cls, scenario: Scenario) -> _ScenarioModel:
-        sequences: Dict[str, Dict[str, Union[List[TaskId], Dict, List]]] = {}
-        for p_name, sequence_data in scenario._sequences.items():
-            sequences[p_name] = {
+        sequences: Dict[str, Dict[str, Union[List[TaskId], Dict, List]]] = {
+            p_name: {
                 Scenario._SEQUENCE_TASKS_KEY: [
-                    t.id if isinstance(t, Task) else t for t in sequence_data.get("tasks", [])
+                    t.id if isinstance(t, Task) else t
+                    for t in sequence_data.get("tasks", [])
                 ],
-                Scenario._SEQUENCE_PROPERTIES_KEY: sequence_data.get("properties", {}),
-                Scenario._SEQUENCE_SUBSCRIBERS_KEY: _utils._fcts_to_dict(sequence_data.get("subscribers", [])),
+                Scenario._SEQUENCE_PROPERTIES_KEY: sequence_data.get(
+                    "properties", {}
+                ),
+                Scenario._SEQUENCE_SUBSCRIBERS_KEY: _utils._fcts_to_dict(
+                    sequence_data.get("subscribers", [])
+                ),
             }
-
+            for p_name, sequence_data in scenario._sequences.items()
+        }
         return _ScenarioModel(
             id=scenario.id,
             config_id=scenario.config_id,
@@ -56,9 +61,7 @@ def _entity_to_model(cls, scenario: Scenario) -> _ScenarioModel:

     @classmethod
     def _model_to_entity(cls, model: _ScenarioModel) -> Scenario:
-        tasks: Union[Set[TaskId], Set[Task], Set] = set()
-        if model.tasks:
-            tasks = set(model.tasks)
+        tasks = set(model.tasks) if model.tasks else set()
         if model.sequences:
             for sequence_name, sequence_data in model.sequences.items():
                 if subscribers := sequence_data.get(Scenario._SEQUENCE_SUBSCRIBERS_KEY):
diff --git a/taipy/core/scenario/_scenario_manager.py b/taipy/core/scenario/_scenario_manager.py
index c2c2b35032..046af0d020 100644
--- a/taipy/core/scenario/_scenario_manager.py
+++ b/taipy/core/scenario/_scenario_manager.py
@@ -141,7 +141,7 @@ def _create(
                         sequence_tasks.append(task)
                     else:
                         non_existing_sequence_task_config_in_scenario_config.add(sequence_task_config.id)
-            if len(non_existing_sequence_task_config_in_scenario_config) > 0:
+            if non_existing_sequence_task_config_in_scenario_config:
                 raise SequenceTaskConfigDoesNotExistInSameScenarioConfig(
                     list(non_existing_sequence_task_config_in_scenario_config), sequence_name, str(config.id)
                 )
@@ -182,7 +182,7 @@ def _create(
             raise InvalidScenario(scenario.id)

         actual_sequences = scenario._get_sequences()
-        for sequence_name in sequences.keys():
+        for sequence_name in sequences:
             if not actual_sequences[sequence_name]._is_consistent():
                 raise InvalidSequence(actual_sequences[sequence_name].id)
             Notifier.publish(_make_event(actual_sequences[sequence_name], EventOperation.CREATION))
@@ -230,26 +230,18 @@ def __get_status_notifier_callbacks(cls, scenario: Scenario) -> List:
     @classmethod
     def _get_primary(cls, cycle: Cycle) -> Optional[Scenario]:
         scenarios = cls._get_all_by_cycle(cycle)
-        for scenario in scenarios:
-            if scenario.is_primary:
-                return scenario
-        return None
+        return next((scenario for scenario in scenarios if scenario.is_primary), None)

     @classmethod
     def _get_by_tag(cls, cycle: Cycle, tag: str) -> Optional[Scenario]:
         scenarios = cls._get_all_by_cycle(cycle)
-        for scenario in scenarios:
-            if scenario.has_tag(tag):
-                return scenario
-        return None
+        return next(
+            (scenario for scenario in scenarios if scenario.has_tag(tag)), None
+        )

     @classmethod
     def _get_all_by_tag(cls, tag: str) -> List[Scenario]:
-        scenarios = []
-        for scenario in cls._get_all():
-            if scenario.has_tag(tag):
-                scenarios.append(scenario)
-        return scenarios
+        return [scenario for scenario in cls._get_all() if scenario.has_tag(tag)]

     @classmethod
     def _get_all_by_cycle(cls, cycle: Cycle) -> List[Scenario]:
@@ -263,32 +255,25 @@ def _get_all_by_cycle(cls, cycle: Cycle) -> List[Scenario]:

     @classmethod
     def _get_primary_scenarios(cls) -> List[Scenario]:
-        primary_scenarios = []
-        for scenario in cls._get_all():
-            if scenario.is_primary:
-                primary_scenarios.append(scenario)
-        return primary_scenarios
+        return [scenario for scenario in cls._get_all() if scenario.is_primary]

     @classmethod
     def _is_promotable_to_primary(cls, scenario: Union[Scenario, ScenarioId]) -> bool:
         if isinstance(scenario, str):
             scenario = cls._get(scenario)
-        if scenario and not scenario.is_primary and scenario.cycle:
-            return True
-        return False
+        return bool(scenario and not scenario.is_primary and scenario.cycle)

     @classmethod
     def _set_primary(cls, scenario: Scenario):
-        if scenario.cycle:
-            primary_scenario = cls._get_primary(scenario.cycle)
-            # To prevent SAME scenario updating out of Context Manager
-            if primary_scenario and primary_scenario != scenario:
-                primary_scenario.is_primary = False  # type: ignore
-            scenario.is_primary = True  # type: ignore
-        else:
+        if not scenario.cycle:
             raise DoesNotBelongToACycle(
                 f"Can't set scenario {scenario.id} to primary because it doesn't belong to a cycle."
             )
+        primary_scenario = cls._get_primary(scenario.cycle)
+        # To prevent SAME scenario updating out of Context Manager
+        if primary_scenario and primary_scenario != scenario:
+            primary_scenario.is_primary = False  # type: ignore
+        scenario.is_primary = True  # type: ignore

     @classmethod
     def _tag(cls, scenario: Scenario, tag: str):
@@ -296,8 +281,7 @@ def _tag(cls, scenario: Scenario, tag: str):
         if len(tags) > 0 and tag not in tags:
             raise UnauthorizedTagError(f"Tag `{tag}` not authorized by scenario configuration `{scenario.config_id}`")
         if scenario.cycle:
-            old_tagged_scenario = cls._get_by_tag(scenario.cycle, tag)
-            if old_tagged_scenario:
+            if old_tagged_scenario := cls._get_by_tag(scenario.cycle, tag):
                 old_tagged_scenario.remove_tag(tag)
                 cls._set(old_tagged_scenario)
         scenario._add_tag(tag)
@@ -319,29 +303,29 @@ def _compare(cls, *scenarios: Scenario, data_node_config_id: Optional[str] = Non
         if len(scenarios) < 2:
             raise InsufficientScenarioToCompare("At least two scenarios are required to compare.")

-        if not all(scenarios[0].config_id == scenario.config_id for scenario in scenarios):
+        if any(
+            scenarios[0].config_id != scenario.config_id for scenario in scenarios
+        ):
             raise DifferentScenarioConfigs("Scenarios to compare must have the same configuration.")

-        if scenario_config := _ScenarioManager.__get_config(scenarios[0]):
-            results = {}
-            if data_node_config_id:
-                if data_node_config_id in scenario_config.comparators.keys():
-                    dn_comparators = {data_node_config_id: scenario_config.comparators[data_node_config_id]}
-                else:
-                    raise NonExistingComparator(f"Data node config {data_node_config_id} has no comparator.")
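+        # Guard clause: fail fast on an unknown scenario config so the comparison below stays unnested.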
+        if not (scenario_config := _ScenarioManager.__get_config(scenarios[0])):
+            raise NonExistingScenarioConfig(scenarios[0].config_id)
+        results = {}
+        if data_node_config_id:
+            if data_node_config_id in scenario_config.comparators.keys():
+                dn_comparators = {data_node_config_id: scenario_config.comparators[data_node_config_id]}
             else:
-                dn_comparators = scenario_config.comparators
-
-            for data_node_config_id, comparators in dn_comparators.items():
-                data_nodes = [scenario.__getattr__(data_node_config_id).read() for scenario in scenarios]
-                results[data_node_config_id] = {
-                    comparator.__name__: comparator(*data_nodes) for comparator in comparators
-                }
+                raise NonExistingComparator(f"Data node config {data_node_config_id} has no comparator.")
+        else:
+            dn_comparators = scenario_config.comparators

+        for data_node_config_id, comparators in dn_comparators.items():
+            data_nodes = [scenario.__getattr__(data_node_config_id).read() for scenario in scenarios]
+            results[data_node_config_id] = {
+                comparator.__name__: comparator(*data_nodes) for comparator in comparators
+            }

-            return results
+        return results

-        else:
-            raise NonExistingScenarioConfig(scenarios[0].config_id)

     @staticmethod
     def __get_config(scenario: Scenario):
diff --git a/taipy/core/scenario/_scenario_manager_factory.py b/taipy/core/scenario/_scenario_manager_factory.py
index d82b54be20..2a7afef560 100644
--- a/taipy/core/scenario/_scenario_manager_factory.py
+++ b/taipy/core/scenario/_scenario_manager_factory.py
@@ -26,11 +26,13 @@ class _ScenarioManagerFactory(_ManagerFactory):
     def _build_manager(cls) -> Type[_ScenarioManager]:  # type: ignore
         if cls._using_enterprise():
             scenario_manager = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".scenario._scenario_manager", "_ScenarioManager"
-            )  # type: ignore
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.scenario._scenario_manager",
+                "_ScenarioManager",
+            )
             build_repository = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".scenario._scenario_manager_factory", "_ScenarioManagerFactory"
-            )._build_repository  # type: ignore
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.scenario._scenario_manager_factory",
+                "_ScenarioManagerFactory",
+            )._build_repository
         else:
             scenario_manager = _ScenarioManager
             build_repository = cls._build_repository
diff --git a/taipy/core/scenario/scenario.py b/taipy/core/scenario/scenario.py
index 1422644cff..ac2c9dd1c6 100644
--- a/taipy/core/scenario/scenario.py
+++ b/taipy/core/scenario/scenario.py
@@ -109,11 +109,14 @@ def __init__(
         self._properties = _Properties(self, **properties)
         self._sequences: Dict[str, Dict] = sequences or {}

-        _scenario_task_ids = set([task.id if isinstance(task, Task) else task for task in self._tasks])
+        _scenario_task_ids = {
+            task.id if isinstance(task, Task) else task for task in self._tasks
+        }
         for sequence_name, sequence_data in self._sequences.items():
-            sequence_task_ids = set(
-                [task.id if isinstance(task, Task) else task for task in sequence_data.get("tasks", [])]
-            )
+            sequence_task_ids = {
+                task.id if isinstance(task, Task) else task
+                for task in sequence_data.get("tasks", [])
+            }
             self.__check_sequence_tasks_exist_in_scenario_tasks(
                 sequence_name, sequence_task_ids, self.id, _scenario_task_ids
             )
@@ -168,7 +171,7 @@ def sequences(
     ):
         self._sequences = sequences
         actual_sequences = self._get_sequences()
-        for sequence_name in sequences.keys():
+        for sequence_name in sequences:
             if not actual_sequences[sequence_name]._is_consistent():
                 raise InvalidSequence(actual_sequences[sequence_name].id)

@@ -192,8 +195,13 @@ def add_sequence(
             SequenceTaskDoesNotExistInScenario^: If a task in the sequence does not exist in the scenario.
         """
         _scenario = _Reloader()._reload(self._MANAGER_NAME, self)
-        _scenario_task_ids = set([task.id if isinstance(task, Task) else task for task in _scenario._tasks])
-        _sequence_task_ids: Set[TaskId] = set([task.id if isinstance(task, Task) else task for task in tasks])
+        _scenario_task_ids = {
+            task.id if isinstance(task, Task) else task
+            for task in _scenario._tasks
+        }
+        _sequence_task_ids: Set[TaskId] = {
+            task.id if isinstance(task, Task) else task for task in tasks
+        }
         self.__check_sequence_tasks_exist_in_scenario_tasks(name, _sequence_task_ids, self.id, _scenario_task_ids)
         _sequences = _Reloader()._reload(self._MANAGER_NAME, self)._sequences
         _sequences.update(
@@ -225,9 +233,14 @@ def add_sequences(self, sequences: Dict[str, Union[List[Task], List[TaskId]]]):
             SequenceTaskDoesNotExistInScenario^: If a task in the sequence does not exist in the scenario.
         """
         _scenario = _Reloader()._reload(self._MANAGER_NAME, self)
-        _sc_task_ids = set([task.id if isinstance(task, Task) else task for task in _scenario._tasks])
+        _sc_task_ids = {
+            task.id if isinstance(task, Task) else task
+            for task in _scenario._tasks
+        }
         for name, tasks in sequences.items():
-            _seq_task_ids: Set[TaskId] = set([task.id if isinstance(task, Task) else task for task in tasks])
+            _seq_task_ids: Set[TaskId] = {
+                task.id if isinstance(task, Task) else task for task in tasks
+            }
             self.__check_sequence_tasks_exist_in_scenario_tasks(name, _seq_task_ids, self.id, _sc_task_ids)
         # Need to parse twice the sequences to avoid adding some sequences and not others in case of exception
         for name, tasks in sequences.items():
@@ -269,11 +282,11 @@ def remove_sequences(self, sequence_names: List[str]):
     def __check_sequence_tasks_exist_in_scenario_tasks(
         sequence_name: str, sequence_task_ids: Set[TaskId], scenario_id: ScenarioId, scenario_task_ids: Set[TaskId]
     ):
-        non_existing_sequence_task_ids_in_scenario = set()
-        for sequence_task_id in sequence_task_ids:
-            if sequence_task_id not in scenario_task_ids:
-                non_existing_sequence_task_ids_in_scenario.add(sequence_task_id)
-        if len(non_existing_sequence_task_ids_in_scenario) > 0:
+        if non_existing_sequence_task_ids_in_scenario := {
+            sequence_task_id
+            for sequence_task_id in sequence_task_ids
+            if sequence_task_id not in scenario_task_ids
+        }:
             raise SequenceTaskDoesNotExistInScenario(
                 list(non_existing_sequence_task_ids_in_scenario), sequence_name, scenario_id
             )
@@ -584,13 +597,17 @@ def _is_consistent(self) -> bool:
             return True
         if not nx.is_directed_acyclic_graph(dag):
             return False
-        for left_node, right_node in dag.edges:
-            if (isinstance(left_node, DataNode) and isinstance(right_node, Task)) or (
-                isinstance(left_node, Task) and isinstance(right_node, DataNode)
-            ):
-                continue
-            return False
-        return True
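+        # De Morgan equivalent of the removed loop: every edge must link a DataNode to a Task or a Task to a DataNode.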
""" _scenario = _Reloader()._reload(self._MANAGER_NAME, self) - _sc_task_ids = set([task.id if isinstance(task, Task) else task for task in _scenario._tasks]) + _sc_task_ids = { + task.id if isinstance(task, Task) else task + for task in _scenario._tasks + } for name, tasks in sequences.items(): - _seq_task_ids: Set[TaskId] = set([task.id if isinstance(task, Task) else task for task in tasks]) + _seq_task_ids: Set[TaskId] = { + task.id if isinstance(task, Task) else task for task in tasks + } self.__check_sequence_tasks_exist_in_scenario_tasks(name, _seq_task_ids, self.id, _sc_task_ids) # Need to parse twice the sequences to avoid adding some sequences and not others in case of exception for name, tasks in sequences.items(): @@ -269,11 +282,11 @@ def remove_sequences(self, sequence_names: List[str]): def __check_sequence_tasks_exist_in_scenario_tasks( sequence_name: str, sequence_task_ids: Set[TaskId], scenario_id: ScenarioId, scenario_task_ids: Set[TaskId] ): - non_existing_sequence_task_ids_in_scenario = set() - for sequence_task_id in sequence_task_ids: - if sequence_task_id not in scenario_task_ids: - non_existing_sequence_task_ids_in_scenario.add(sequence_task_id) - if len(non_existing_sequence_task_ids_in_scenario) > 0: + if non_existing_sequence_task_ids_in_scenario := { + sequence_task_id + for sequence_task_id in sequence_task_ids + if sequence_task_id not in scenario_task_ids + }: raise SequenceTaskDoesNotExistInScenario( list(non_existing_sequence_task_ids_in_scenario), sequence_name, scenario_id ) @@ -584,13 +597,17 @@ def _is_consistent(self) -> bool: return True if not nx.is_directed_acyclic_graph(dag): return False - for left_node, right_node in dag.edges: - if (isinstance(left_node, DataNode) and isinstance(right_node, Task)) or ( - isinstance(left_node, Task) and isinstance(right_node, DataNode) - ): - continue - return False - return True + return not any( + ( + not isinstance(left_node, DataNode) + or not isinstance(right_node, Task) + ) + and ( + not isinstance(left_node, Task) + or not isinstance(right_node, DataNode) + ) + for left_node, right_node in dag.edges + ) @_make_event.register(Scenario) diff --git a/taipy/core/sequence/_sequence_manager.py b/taipy/core/sequence/_sequence_manager.py index f9280f19e2..359682b378 100644 --- a/taipy/core/sequence/_sequence_manager.py +++ b/taipy/core/sequence/_sequence_manager.py @@ -150,14 +150,13 @@ def _create( task_manager = _TaskManagerFactory._build_manager() _tasks: List[Task] = [] for task in tasks: - if not isinstance(task, Task): - if _task := task_manager._get(task): - _tasks.append(_task) - else: - raise NonExistingTask(task) - else: + if isinstance(task, Task): _tasks.append(task) + elif _task := task_manager._get(task): + _tasks.append(_task) + else: + raise NonExistingTask(task) properties = properties if properties else {} properties["name"] = sequence_name version = version if version else cls._get_latest_version() @@ -226,9 +225,13 @@ def _get_all_by(cls, filters: Optional[List[Dict]] = None) -> List[Sequence]: filtered_sequences = [] for sequence in sequences: - for filter in filters: - if all([getattr(sequence, key) == item for key, item in filter.items()]): - filtered_sequences.append(sequence) + filtered_sequences.extend( + sequence + for filter in filters + if all( + getattr(sequence, key) == item for key, item in filter.items() + ) + ) return filtered_sequences @classmethod @@ -332,7 +335,7 @@ def _exists(cls, entity_id: str) -> bool: """ Returns True if the entity id exists. 
""" - return True if cls._get(entity_id) else False + return bool(cls._get(entity_id)) @classmethod def _export(cls, id: str, folder_path: Union[str, pathlib.Path]): @@ -353,12 +356,11 @@ def _export(cls, id: str, folder_path: Union[str, pathlib.Path]): sequence = {"id": id, "owner_id": scenario_id, "parent_ids": [scenario_id], "name": sequence_name} scenario = _ScenarioManagerFactory._build_manager()._get(scenario_id) - if sequence_data := scenario._sequences.get(sequence_name, None): - sequence.update(sequence_data) - with open(export_path, "w", encoding="utf-8") as export_file: - export_file.write(json.dumps(sequence)) - else: + if not (sequence_data := scenario._sequences.get(sequence_name, None)): raise ModelNotFound(cls._model_name, id) + sequence.update(sequence_data) + with open(export_path, "w", encoding="utf-8") as export_file: + export_file.write(json.dumps(sequence)) @classmethod def __log_error_entity_not_found(cls, sequence_id: Union[SequenceId, str]): diff --git a/taipy/core/sequence/_sequence_manager_factory.py b/taipy/core/sequence/_sequence_manager_factory.py index 2f441e730e..ac6dd37dcd 100644 --- a/taipy/core/sequence/_sequence_manager_factory.py +++ b/taipy/core/sequence/_sequence_manager_factory.py @@ -19,10 +19,11 @@ class _SequenceManagerFactory(_ManagerFactory): @classmethod def _build_manager(cls) -> Type[_SequenceManager]: # type: ignore - if cls._using_enterprise(): - sequence_manager = _load_fct( - cls._TAIPY_ENTERPRISE_CORE_MODULE + ".sequence._sequence_manager", "_SequenceManager" - ) # type: ignore - else: - sequence_manager = _SequenceManager - return sequence_manager # type: ignore + return ( + _load_fct( + f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.sequence._sequence_manager", + "_SequenceManager", + ) + if cls._using_enterprise() + else _SequenceManager + ) diff --git a/taipy/core/sequence/sequence.py b/taipy/core/sequence/sequence.py index a31383f374..ededd551e5 100644 --- a/taipy/core/sequence/sequence.py +++ b/taipy/core/sequence/sequence.py @@ -135,13 +135,17 @@ def _is_consistent(self) -> bool: return False if not nx.is_weakly_connected(dag): return False - for left_node, right_node in dag.edges: - if (isinstance(left_node, DataNode) and isinstance(right_node, Task)) or ( - isinstance(left_node, Task) and isinstance(right_node, DataNode) - ): - continue - return False - return True + return not any( + ( + not isinstance(left_node, DataNode) + or not isinstance(right_node, Task) + ) + and ( + not isinstance(left_node, Task) + or not isinstance(right_node, DataNode) + ) + for left_node, right_node in dag.edges + ) def _get_tasks(self) -> Dict[str, Task]: from ..task._task_manager_factory import _TaskManagerFactory diff --git a/taipy/core/submission/_submission_converter.py b/taipy/core/submission/_submission_converter.py index a7a85cb5a2..021cef4bba 100644 --- a/taipy/core/submission/_submission_converter.py +++ b/taipy/core/submission/_submission_converter.py @@ -34,7 +34,7 @@ def _entity_to_model(cls, submission: Submission) -> _SubmissionModel: @classmethod def _model_to_entity(cls, model: _SubmissionModel) -> Submission: - submission = Submission( + return Submission( entity_id=model.entity_id, entity_type=model.entity_type, entity_config_id=model.entity_config_id, @@ -44,4 +44,3 @@ def _model_to_entity(cls, model: _SubmissionModel) -> Submission: submission_status=model.submission_status, version=model.version, ) - return submission diff --git a/taipy/core/submission/_submission_manager.py b/taipy/core/submission/_submission_manager.py index 
index b91c30af53..623f4b3199 100644
--- a/taipy/core/submission/_submission_manager.py
+++ b/taipy/core/submission/_submission_manager.py
@@ -46,10 +46,16 @@ def _create(cls, entity_id: str, entity_type: str, entity_config: Optional[str])
     @classmethod
     def _get_latest(cls, entity: Union[Scenario, Sequence, Task]) -> Optional[Submission]:
         entity_id = entity.id if not isinstance(entity, str) else entity
-        submissions_of_task = list(filter(lambda submission: submission.entity_id == entity_id, cls._get_all()))
-        if len(submissions_of_task) == 0:
-            return None
-        if len(submissions_of_task) == 1:
-            return submissions_of_task[0]
+        if submissions_of_task := list(
+            filter(
+                lambda submission: submission.entity_id == entity_id,
+                cls._get_all(),
+            )
+        ):
+            return (
+                submissions_of_task[0]
+                if len(submissions_of_task) == 1
+                else max(submissions_of_task)
+            )
         else:
-            return max(submissions_of_task)
+            return None
diff --git a/taipy/core/submission/_submission_manager_factory.py b/taipy/core/submission/_submission_manager_factory.py
index cd7b6689cf..4b4dac9d58 100644
--- a/taipy/core/submission/_submission_manager_factory.py
+++ b/taipy/core/submission/_submission_manager_factory.py
@@ -26,12 +26,13 @@ class _SubmissionManagerFactory(_ManagerFactory):
     def _build_manager(cls) -> Type[_SubmissionManager]:  # type: ignore
         if cls._using_enterprise():
             submission_manager = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".submission._submission_manager", "_SubmissionManager"
-            )  # type: ignore
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.submission._submission_manager",
+                "_SubmissionManager",
+            )
             build_repository = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".submission._submission_manager_factory",
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.submission._submission_manager_factory",
                 "_SubmissionManagerFactory",
-            )._build_repository  # type: ignore
+            )._build_repository
         else:
             submission_manager = _SubmissionManager
             build_repository = cls._build_repository
diff --git a/taipy/core/submission/submission.py b/taipy/core/submission/submission.py
index 86783d3439..4212fd7ee7 100644
--- a/taipy/core/submission/submission.py
+++ b/taipy/core/submission/submission.py
@@ -112,13 +112,9 @@ def get_simple_label(self) -> str:
     @property  # type: ignore
     @_self_reload(_MANAGER_NAME)
     def jobs(self) -> List[Job]:
-        jobs = []
         job_manager = _JobManagerFactory._build_manager()

-        for job in self._jobs:
-            jobs.append(job_manager._get(job))
-
-        return jobs
+        return [job_manager._get(job) for job in self._jobs]

     @jobs.setter  # type: ignore
     @_self_setter(_MANAGER_NAME)
@@ -169,13 +165,13 @@ def _update_submission_status(self, job: Job):
         elif job_status == Status.BLOCKED:
             self.__blocked_jobs.add(job.id)
             self.__pending_jobs.discard(job.id)
-        elif job_status == Status.PENDING or job_status == Status.SUBMITTED:
+        elif job_status in [Status.PENDING, Status.SUBMITTED]:
             self.__pending_jobs.add(job.id)
             self.__blocked_jobs.discard(job.id)
         elif job_status == Status.RUNNING:
             self.__running_jobs.add(job.id)
             self.__pending_jobs.discard(job.id)
-        elif job_status == Status.COMPLETED or job_status == Status.SKIPPED:
+        elif job_status in [Status.COMPLETED, Status.SKIPPED]:
             self.__completed = True
             self.__blocked_jobs.discard(job.id)
             self.__pending_jobs.discard(job.id)
diff --git a/taipy/core/taipy.py b/taipy/core/taipy.py
index 3bcf419e9f..e54b09a37e 100644
--- a/taipy/core/taipy.py
+++ b/taipy/core/taipy.py
@@ -860,7 +860,7 @@ def update_parent_dict(parents_set, parent_dict):
     current_parent_dict: Dict[str, Set] = {}
     for parent in entity.parent_ids:
         parent_entity = get(parent)
-        if parent_entity._MANAGER_NAME in current_parent_dict.keys():
+        if parent_entity._MANAGER_NAME in current_parent_dict:
             current_parent_dict[parent_entity._MANAGER_NAME].add(parent_entity)
         else:
             current_parent_dict[parent_entity._MANAGER_NAME] = {parent_entity}
@@ -892,7 +892,7 @@ def get_cycles_scenarios() -> Dict[Optional[Cycle], List[Scenario]]:

     cycles_scenarios: Dict[Optional[Cycle], List[Scenario]] = {}
     for scenario in get_scenarios():
-        if scenario.cycle in cycles_scenarios.keys():
+        if scenario.cycle in cycles_scenarios:
             cycles_scenarios[scenario.cycle].append(scenario)
         else:
             cycles_scenarios[scenario.cycle] = [scenario]
diff --git a/taipy/core/task/_task_manager.py b/taipy/core/task/_task_manager.py
index 15f0423033..587389eafa 100644
--- a/taipy/core/task/_task_manager.py
+++ b/taipy/core/task/_task_manager.py
@@ -70,7 +70,7 @@ def _bulk_get_or_create(
                 Config.data_nodes[dnc.id] for dnc in task_config.input_configs
             ]
             task_config_data_nodes = [data_nodes[dn_config] for dn_config in task_dn_configs]
-            scope = min(dn.scope for dn in task_config_data_nodes) if len(task_config_data_nodes) != 0 else Scope.GLOBAL
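+            # min() with a default handles tasks without data nodes, replacing the explicit len() check.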
+            scope = min((dn.scope for dn in task_config_data_nodes), default=Scope.GLOBAL)
             owner_id: Union[Optional[SequenceId], Optional[ScenarioId], Optional[CycleId]]
             if scope == Scope.SCENARIO:
                 owner_id = scenario_id
@@ -87,9 +87,7 @@ def _bulk_get_or_create(

         tasks = []
         for task_config, owner_id in tasks_configs_and_owner_id:
-            if task := tasks_by_config.get((task_config, owner_id)):
-                tasks.append(task)
-            else:
+            if not (task := tasks_by_config.get((task_config, owner_id))):
                 version = _VersionManagerFactory._build_manager()._get_latest_version()
                 inputs = [
                     data_nodes[input_config]
@@ -115,7 +113,7 @@ def _bulk_get_or_create(
                     dn._parent_ids.update([task.id])
                 cls._set(task)
                 Notifier.publish(_make_event(task, EventOperation.CREATION))
-                tasks.append(task)
+            tasks.append(task)
         return tasks

     @classmethod
diff --git a/taipy/core/task/_task_manager_factory.py b/taipy/core/task/_task_manager_factory.py
index b1535711d6..360e32e993 100644
--- a/taipy/core/task/_task_manager_factory.py
+++ b/taipy/core/task/_task_manager_factory.py
@@ -26,11 +26,13 @@ class _TaskManagerFactory(_ManagerFactory):
     def _build_manager(cls) -> Type[_TaskManager]:  # type: ignore
         if cls._using_enterprise():
             task_manager = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager", "_TaskManager"
-            )  # type: ignore
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.task._task_manager",
+                "_TaskManager",
+            )
             build_repository = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager_factory", "_TaskManagerFactory"
-            )._build_repository  # type: ignore
+                f"{cls._TAIPY_ENTERPRISE_CORE_MODULE}.task._task_manager_factory",
+                "_TaskManagerFactory",
+            )._build_repository
         else:
             task_manager = _TaskManager
             build_repository = cls._build_repository
diff --git a/taipy/core/task/_task_model.py b/taipy/core/task/_task_model.py
index 2c671c1ee0..ad3b2cff49 100644
--- a/taipy/core/task/_task_model.py
+++ b/taipy/core/task/_task_model.py
@@ -53,7 +53,9 @@ def from_dict(data: Dict[str, Any]):
         return _TaskModel(
             id=data["id"],
             owner_id=data.get("owner_id"),
-            parent_ids=_BaseModel._deserialize_attribute(data.get("parent_ids", [])),
+            parent_ids=_BaseModel._deserialize_attribute(
+                data.get("parent_ids", [])
+            ),
             config_id=data["config_id"],
             input_ids=_BaseModel._deserialize_attribute(data["input_ids"]),
             function_name=data["function_name"],
@@ -61,7 +63,9 @@ def from_dict(data: Dict[str, Any]):
             output_ids=_BaseModel._deserialize_attribute(data["output_ids"]),
             version=data["version"],
             skippable=data["skippable"],
-            properties=_BaseModel._deserialize_attribute(data["properties"] if "properties" in data.keys() else {}),
+            properties=_BaseModel._deserialize_attribute(
+                data.get("properties", {})
+            ),
         )

     def to_list(self):
diff --git a/taipy/core/task/task.py b/taipy/core/task/task.py
index 5772d75264..c9389450f1 100644
--- a/taipy/core/task/task.py
+++ b/taipy/core/task/task.py
@@ -161,8 +161,11 @@ def scope(self) -> Scope:
             either no input or no output.
         """
         data_nodes = list(self.__input.values()) + list(self.__output.values())
-        scope = Scope(min(dn.scope for dn in data_nodes)) if len(data_nodes) != 0 else Scope.GLOBAL
-        return scope
+        return (
+            Scope(min(dn.scope for dn in data_nodes))
+            if len(data_nodes) != 0
+            else Scope.GLOBAL
+        )

     @property
     def version(self):
diff --git a/taipy/gui/_gui_cli.py b/taipy/gui/_gui_cli.py
index faedaee67b..9f4ec89a6d 100644
--- a/taipy/gui/_gui_cli.py
+++ b/taipy/gui/_gui_cli.py
@@ -99,6 +99,6 @@ def parse_arguments(cls):
     @classmethod
     def __add_taipy_prefix(cls, key: str):
         if key.startswith("--no-"):
-            return key[:5] + "taipy-" + key[5:]
+            return f"{key[:5]}taipy-{key[5:]}"

-        return key[:2] + "taipy-" + key[2:]
+        return f"{key[:2]}taipy-{key[2:]}"
diff --git a/taipy/gui/_renderers/_markdown/preproc.py b/taipy/gui/_renderers/_markdown/preproc.py
index 0ef13e26db..5727ea50ba 100644
--- a/taipy/gui/_renderers/_markdown/preproc.py
+++ b/taipy/gui/_renderers/_markdown/preproc.py
@@ -73,8 +73,8 @@ def _make_prop_pair(self, prop_name: str, prop_value: str) -> Tuple[str, str]:
         return (prop_name, prop_value.replace("\\|", "|"))

     def run(self, lines: List[str]) -> List[str]:
-        new_lines = []
         tag_queue = []
+        new_lines = []
         for line_count, line in enumerate(lines, start=1):
             new_line = ""
             last_index = 0
@@ -150,11 +150,7 @@ def run(self, lines: List[str]) -> List[str]:
                         + new_line[m.end() :]
                     )
                 else:
-                    new_line = (
-                        new_line[: m.start()]
-                        + f"