
Commit

Code review fixes
nicl-nno committed Feb 26, 2024
1 parent f3c9f58 commit 7d8d3b2
Showing 7 changed files with 12 additions and 13 deletions.
@@ -17,7 +17,6 @@
 from fedot.core.pipelines.node import PipelineNode
 from fedot.core.pipelines.pipeline import Pipeline
 from fedot.core.pipelines.pipeline_composer_requirements import PipelineComposerRequirements
-from fedot.core.pipelines.tuning.hyperparams import ParametersChanger
 from fedot.core.pipelines.tuning.search_space import PipelineSearchSpace
 from fedot.core.pipelines.tuning.tuner_builder import TunerBuilder
 from fedot.core.repository.metrics_repository import ClassificationMetricsEnum, ComplexityMetricsEnum
@@ -113,7 +112,7 @@ def run_image_classification_automl(train_dataset: tuple,
     pop_size = 5

     # search space for hyper-parametric mutation
-    ParametersChanger.custom_search_space = custom_search_space
+    PipelineSearchSpace.pre_defined_custom_search_space = custom_search_space

     params = GPAlgorithmParameters(
         selection_types=[SelectionTypesEnum.spea2],
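In the hunk above, the example now configures the custom search space through the new class-level attribute on PipelineSearchSpace instead of ParametersChanger.custom_search_space. A hedged sketch of the dictionary format that attribute expects follows; the operation and parameter names are placeholders, not values taken from this commit.

    from hyperopt import hp

    from fedot.core.pipelines.tuning.search_space import PipelineSearchSpace

    # Placeholder entry in the "(hyperopt expression, *params)" tuple format described
    # in the PipelineSearchSpace docstring; 'rf' and 'n_estimators' are illustrative.
    custom_search_space = {'rf': {'n_estimators': (hp.uniformint, [10, 200])}}

    # The assignment the example performs after this commit:
    PipelineSearchSpace.pre_defined_custom_search_space = custom_search_space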
examples/advanced/customization/implementations/cnn_impls.py (6 changes: 3 additions & 3 deletions)
@@ -135,9 +135,9 @@ def fit(self, train_data):
         self.model = cnn_model_dict[self.params.get('architecture_type')](input_shape=train_data.features.shape[1:4],
                                                                           num_classes=len(self.classes))

-        # self.model = fit_cnn(train_data=train_data, model=self.model, epochs=self.params.get('epochs'),
-        #                      batch_size=self.params.get('batch_size'),
-        #                      optimizer_params=self.params.get('optimizer_parameters'), logger=self.params.get('log'))
+        self.model = fit_cnn(train_data=train_data, model=self.model, epochs=self.params.get('epochs'),
+                             batch_size=self.params.get('batch_size'),
+                             optimizer_params=self.params.get('optimizer_parameters'), logger=self.params.get('log'))
         return self.model

     def predict(self, input_data):
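This re-enables the actual CNN training call that was previously commented out, so fit() now really depends on the node parameters read through self.params.get. A hedged illustration of such a parameter set follows; the keys come from the calls above, every value is an assumption.

    # Illustrative values only; the real defaults are defined elsewhere in the repository.
    illustrative_cnn_params = {
        'architecture_type': 'deep',   # key into cnn_model_dict; the concrete name is assumed
        'epochs': 10,
        'batch_size': 32,
        'optimizer_parameters': {'loss': 'categorical_crossentropy', 'optimizer': 'adam'},
        'log': None,                   # passed to fit_cnn as logger
    }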
fedot/core/pipelines/pipeline_advisor.py (2 changes: 1 addition & 1 deletion)
@@ -45,7 +45,7 @@ def propose_change(self, node: OptNode, possible_operations: List[str]) -> List[
         candidates = set.intersection({'lagged', 'sparse_lagged'}, set(possible_operations))

         if 'cnn' in operation_id:
-            candidates = [c for c in candidates if 'cnn' in candidates]
+            candidates = [cand for cand in candidates if 'cnn' in cand]

         if operation_id in candidates:
             # the change to the same node is not meaningful
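The comprehension fix changes the filter from a membership test on the whole candidates list to a substring test on each candidate. A minimal, self-contained illustration with hypothetical candidate names:

    # Hypothetical candidate names; only the comprehension semantics matter here.
    candidates = ['lagged', 'cnn_1d', 'sparse_lagged']

    # Old form: 'cnn' in candidates checks whether the literal string 'cnn' is an element
    # of the list, so the condition is identical for every item.
    old = [c for c in candidates if 'cnn' in candidates]        # -> []

    # New form: each candidate string is checked for the 'cnn' substring individually.
    new = [cand for cand in candidates if 'cnn' in cand]        # -> ['cnn_1d']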
fedot/core/pipelines/tuning/hyperparams.py (5 changes: 1 addition & 4 deletions)
@@ -11,13 +11,10 @@
 class ParametersChanger:
     """
     Class for the hyperparameters changing in the operation

     :param operation_name: name of operation to get hyperparameters for
     :param current_params: current parameters value
     """
-
-    custom_search_space = None
-
     def __init__(self, operation_name, current_params):
         self.operation_name = operation_name
         self.current_params = current_params
@@ -28,7 +25,7 @@ def get_new_operation_params(self):

         # Get available parameters for operation
         params_list = \
-            PipelineSearchSpace(ParametersChanger.custom_search_space).get_parameters_for_operation(self.operation_name)
+            PipelineSearchSpace().get_parameters_for_operation(self.operation_name)

         if not params_list:
             params_dict = None
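With the class attribute removed, ParametersChanger now builds PipelineSearchSpace without arguments, so a custom space can only reach it through the global override added in search_space.py below. A hedged usage sketch, with an illustrative operation name and current parameter value:

    from fedot.core.pipelines.tuning.hyperparams import ParametersChanger

    # 'rf' and the current n_estimators value are placeholders, not from this commit.
    changer = ParametersChanger(operation_name='rf', current_params={'n_estimators': 100})

    # Internally this calls PipelineSearchSpace() with no arguments, so any customisation
    # must have been installed via PipelineSearchSpace.pre_defined_custom_search_space.
    new_params = changer.get_new_operation_params()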
fedot/core/pipelines/tuning/search_space.py (7 changes: 5 additions & 2 deletions)
@@ -9,17 +9,20 @@

 class PipelineSearchSpace(SearchSpace):
     """
-    Class for extracting searching space
+    Class for extracting searching space for hyperparameters of pipeline

     :param custom_search_space: dictionary of dictionaries of tuples (hyperopt expression (e.g. hp.choice), *params)
         for applying custom hyperparameters search space
     :param replace_default_search_space: whether replace default dictionary (False) or append it (True)
     """

+    pre_defined_custom_search_space = None  # workaround to modify search space globally
+
     def __init__(self,
                  custom_search_space: Optional[OperationParametersMapping] = None,
                  replace_default_search_space: bool = False):
-        self.custom_search_space = custom_search_space
+        self.custom_search_space = custom_search_space if PipelineSearchSpace.pre_defined_custom_search_space is None \
+            else PipelineSearchSpace.pre_defined_custom_search_space
         self.replace_default_search_space = replace_default_search_space
         parameters_per_operation = self.get_parameters_dict()
         super().__init__(parameters_per_operation)
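The new pre_defined_custom_search_space attribute acts as a process-wide override: while it is set, every PipelineSearchSpace instance uses it in place of its constructor argument. A minimal sketch of that behaviour, assuming FEDOT and hyperopt are importable and using a placeholder search-space entry:

    from hyperopt import hp

    from fedot.core.pipelines.tuning.search_space import PipelineSearchSpace

    override = {'rf': {'n_estimators': (hp.uniformint, [10, 200])}}  # placeholder entry
    PipelineSearchSpace.pre_defined_custom_search_space = override

    # While the override is set, the constructor argument is ignored.
    space = PipelineSearchSpace(custom_search_space=None)
    assert space.custom_search_space is override

    # Resetting it afterwards restores per-instance behaviour (a suggested usage pattern,
    # not something this commit does).
    PipelineSearchSpace.pre_defined_custom_search_space = None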
test/unit/tasks/test_classification.py (2 changes: 1 addition & 1 deletion)
@@ -4,7 +4,7 @@
 from sklearn.datasets import load_iris, make_classification
 from sklearn.metrics import roc_auc_score as roc_auc

-from examples.simple.classification.image_classifcation_problem import run_image_classification_problem
+from examples.simple.classification.image_classification_problem import run_image_classification_problem
 from fedot.core.data.data import InputData
 from fedot.core.data.data_split import train_test_data_setup
 from fedot.core.data.supplementary_data import SupplementaryData
