Skip to content

Commit

Permalink
create constants for frequently used labels
Browse files Browse the repository at this point in the history
  • Loading branch information
MorrisNein committed Oct 30, 2023
1 parent 1c7ef13 commit 0af205f
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 13 deletions.
12 changes: 8 additions & 4 deletions golem/core/optimisers/opt_history_objects/opt_history.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@
if TYPE_CHECKING:
from golem.core.optimisers.opt_history_objects.individual import Individual

# Canonical labels used to tag special generations in OptHistory.generations.
# They are matched by the OptHistory accessor properties below and imported by
# other modules (e.g. random_search.py and tuner_interface.py in this commit),
# so the string values must stay stable across the codebase.
INITIAL_ASSUMPTIONS_LABEL = 'initial_assumptions'
EVOLUTION_RESULTS_LABEL = 'evolution_results'
TUNING_START_LABEL = 'tuning_start'
TUNING_RESULT_LABEL = 'tuning_result'

class OptHistory:
"""
@property  # NOTE(review): decorator elided in the diff view; sibling accessors are properties — confirm
def initial_assumptions(self) -> Optional[Generation]:
    """Return the first generation labeled as initial assumptions, if any.

    Scans generations front-to-back (initial assumptions are recorded first).
    Returns None when the history is empty; falls through to an implicit None
    when no generation carries the label.
    """
    if not self.generations:
        return None
    for gen in self.generations:
        if gen.label == INITIAL_ASSUMPTIONS_LABEL:
            return gen

@property
def evolution_results(self) -> Optional[Generation]:
    """Return the latest generation labeled as evolution results, if any.

    Scans generations back-to-front so the most recent matching entry wins.
    Returns None when the history is empty; falls through to an implicit None
    when no generation carries the label.
    """
    if not self.generations:
        return None
    for gen in reversed(self.generations):
        if gen.label == EVOLUTION_RESULTS_LABEL:
            return gen

@property
def tuning_start(self) -> Optional[Generation]:
    """Return the latest generation labeled as the start of tuning, if any.

    Scans generations back-to-front so the most recent matching entry wins.
    Returns None when the history is empty; falls through to an implicit None
    when no generation carries the label.
    """
    if not self.generations:
        return None
    for gen in reversed(self.generations):
        if gen.label == TUNING_START_LABEL:
            return gen

@property
def tuning_result(self) -> Optional[Generation]:
    """Return the latest generation labeled as the tuning result, if any.

    Scans generations back-to-front so the most recent matching entry wins.
    Returns None when the history is empty; falls through to an implicit None
    when no generation carries the label.
    """
    if not self.generations:
        return None
    for gen in reversed(self.generations):
        if gen.label == TUNING_RESULT_LABEL:
            return gen

@property
Expand Down
9 changes: 5 additions & 4 deletions golem/core/optimisers/random/random_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@
from golem.core.optimisers.graph import OptGraph
from golem.core.optimisers.objective import Objective, ObjectiveFunction
from golem.core.optimisers.opt_history_objects.individual import Individual
from golem.core.optimisers.opt_history_objects.opt_history import EVOLUTION_RESULTS_LABEL, INITIAL_ASSUMPTIONS_LABEL
from golem.core.optimisers.optimization_parameters import GraphRequirements
from golem.core.optimisers.optimizer import GraphOptimizer, GraphGenerationParams
from golem.core.optimisers.optimizer import GraphGenerationParams, GraphOptimizer
from golem.core.optimisers.timer import OptimisationTimer
from golem.core.utilities.grouped_condition import GroupedCondition

Expand All @@ -34,7 +35,7 @@ def __init__(self,
'Optimisation stopped: Time limit is reached'
).add_condition(
lambda: requirements.num_of_generations is not None and
self.current_iteration_num >= requirements.num_of_generations,
self.current_iteration_num >= requirements.num_of_generations,
'Optimisation stopped: Max number of iterations reached')

def optimise(self, objective: ObjectiveFunction) -> Sequence[OptGraph]:
Expand All @@ -46,14 +47,14 @@ def optimise(self, objective: ObjectiveFunction) -> Sequence[OptGraph]:

with self.timer, self._progressbar as pbar:
self.best_individual = self._eval_initial_individual(evaluator)
self._update_best_individual(self.best_individual, 'initial_assumptions')
self._update_best_individual(self.best_individual, INITIAL_ASSUMPTIONS_LABEL)
while not self.stop_optimization():
new_individual = self._generate_new_individual()
evaluator([new_individual])
self.current_iteration_num += 1
self._update_best_individual(new_individual)
pbar.update()
self._update_best_individual(self.best_individual, 'evolution_results')
self._update_best_individual(self.best_individual, EVOLUTION_RESULTS_LABEL)
pbar.close()
return [self.best_individual.graph]

Expand Down
10 changes: 5 additions & 5 deletions golem/core/tuning/tuner_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from golem.core.optimisers.graph import OptGraph
from golem.core.optimisers.objective import ObjectiveEvaluate, ObjectiveFunction
from golem.core.optimisers.opt_history_objects.individual import Individual
from golem.core.optimisers.opt_history_objects.opt_history import OptHistory
from golem.core.optimisers.opt_history_objects.opt_history import OptHistory, TUNING_RESULT_LABEL, TUNING_START_LABEL
from golem.core.optimisers.opt_history_objects.parent_operator import ParentOperator
from golem.core.tuning.search_space import SearchSpace, convert_parameters
from golem.core.utilities.data_structures import ensure_wrapped_in_sequence
Expand Down Expand Up @@ -96,7 +96,7 @@ def init_check(self, graph: OptGraph) -> None:
graph = deepcopy(graph)
fitness = self.objective_evaluate(graph)
self.init_individual = self._create_individual(graph, fitness)
self._add_to_history([self.init_individual], label='tuning_start')
self._add_to_history([self.init_individual], label=TUNING_START_LABEL)

init_metric = self._fitness_to_metric_value(fitness)
self.log.message(f'Initial graph: {graph_structure(graph)} \n'
Expand Down Expand Up @@ -154,7 +154,7 @@ def _single_obj_final_check(self, tuned_graph: OptGraph):
self.log.message('Final metric is None')

self.obtained_individual = final_individual
self._add_to_history([self.obtained_individual], label='tuning_result')
self._add_to_history([self.obtained_individual], label=TUNING_RESULT_LABEL)

return self.obtained_individual.graph

Expand All @@ -179,7 +179,7 @@ def _multi_obj_final_check(self, tuned_graphs: Sequence[OptGraph]) -> Sequence[O
self.obtained_individual = [self.init_individual]
final_graphs = [self.init_individual.graph]

self._add_to_history(self.obtained_individual, label='tuning_result')
self._add_to_history(self.obtained_individual, label=TUNING_RESULT_LABEL)

return final_graphs

Expand Down Expand Up @@ -284,7 +284,7 @@ def _add_to_history(self, individuals: Sequence[Individual], label: Optional[str

if label is None:
label = f'tuning_iteration_{self.evaluations_count}'
if label not in ('tuning_start', 'tuning_result'):
if label not in (TUNING_START_LABEL, TUNING_RESULT_LABEL):
individuals = list(individuals)
individuals.append(self.init_individual) # add initial individual to maintain consistency of inheritance
history.add_to_history(individuals=individuals,
Expand Down

0 comments on commit 0af205f

Please sign in to comment.