diff --git a/google/cloud/aiplatform/hyperparameter_tuning.py b/google/cloud/aiplatform/hyperparameter_tuning.py
index a43f1c39fd..a0ef82ebb9 100644
--- a/google/cloud/aiplatform/hyperparameter_tuning.py
+++ b/google/cloud/aiplatform/hyperparameter_tuning.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 
-# Copyright 2021 Google LLC
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -29,6 +29,10 @@
     "unspecified": gca_study_compat.StudySpec.ParameterSpec.ScaleType.SCALE_TYPE_UNSPECIFIED,
 }
 
+_INT_VALUE_SPEC = "integer_value_spec"
+_DISCRETE_VALUE_SPEC = "discrete_value_spec"
+_CATEGORICAL_VALUE_SPEC = "categorical_value_spec"
+
 
 class _ParameterSpec(metaclass=abc.ABCMeta):
     """Base class represents a single parameter to optimize."""
@@ -77,10 +81,30 @@ def _to_parameter_spec(
         self, parameter_id: str
     ) -> gca_study_compat.StudySpec.ParameterSpec:
         """Converts this parameter to ParameterSpec."""
-        # TODO: Conditional parameters
+        conditions = []
+        if self.conditional_parameter_spec is not None:
+            for (conditional_param_id, spec) in self.conditional_parameter_spec.items():
+                condition = (
+                    gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec()
+                )
+                if self._parameter_spec_value_key == _INT_VALUE_SPEC:
+                    condition.parent_int_values = gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.IntValueCondition(
+                        values=spec.parent_values
+                    )
+                elif self._parameter_spec_value_key == _CATEGORICAL_VALUE_SPEC:
+                    condition.parent_categorical_values = gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.CategoricalValueCondition(
+                        values=spec.parent_values
+                    )
+                elif self._parameter_spec_value_key == _DISCRETE_VALUE_SPEC:
+                    condition.parent_discrete_values = gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
+                        values=spec.parent_values
+                    )
+                condition.parameter_spec = spec._to_parameter_spec(conditional_param_id)
+                conditions.append(condition)
         parameter_spec = gca_study_compat.StudySpec.ParameterSpec(
             parameter_id=parameter_id,
             scale_type=_SCALE_TYPE_MAP.get(getattr(self, "scale", "unspecified")),
+            conditional_parameter_specs=conditions,
         )
 
         setattr(
@@ -105,6 +129,8 @@ def __init__(
         self,
         min: float,
         max: float,
         scale: str,
+        conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
+        parent_values: Optional[Sequence[Union[int, float, str]]] = None,
     ):
         """
         Value specification for a parameter in ``DOUBLE`` type.
@@ -120,9 +146,16 @@
                 Required. The type of scaling that should be applied to this
                 parameter. Accepts: 'linear', 'log', 'reverse_log'
+            conditional_parameter_spec (Dict[str, _ParameterSpec]):
+                Optional. The conditional parameters associated with the object. The dictionary key
+                is the ID of the conditional parameter and the dictionary value is one of `DoubleParameterSpec`,
+                `IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`.
+            parent_values (Sequence[Union[int, float, str]]):
+                Optional. This argument is only needed when the object is a conditional parameter
+                and specifies the parent parameter's values for which the condition applies.
         """
-        super().__init__()
+        super().__init__(conditional_parameter_spec, parent_values)
 
         self.min = min
         self.max = max
@@ -142,6 +175,8 @@
         min: int,
         max: int,
         scale: str,
+        conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
+        parent_values: Optional[Sequence[Union[int, float, str]]] = None,
     ):
         """
         Value specification for a parameter in ``INTEGER`` type.
@@ -157,9 +192,18 @@
                 Required. The type of scaling that should be applied to this
                 parameter. Accepts: 'linear', 'log', 'reverse_log'
+            conditional_parameter_spec (Dict[str, _ParameterSpec]):
+                Optional. The conditional parameters associated with the object. The dictionary key
+                is the ID of the conditional parameter and the dictionary value is one of `DoubleParameterSpec`,
+                `IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`.
+            parent_values (Sequence[int]):
+                Optional. This argument is only needed when the object is a conditional parameter
+                and specifies the parent parameter's values for which the condition applies.
         """
-
-        super().__init__()
+        super().__init__(
+            conditional_parameter_spec=conditional_parameter_spec,
+            parent_values=parent_values,
+        )
 
         self.min = min
         self.max = max
@@ -177,15 +221,26 @@ class CategoricalParameterSpec(_ParameterSpec):
     def __init__(
         self,
         values: Sequence[str],
+        conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
+        parent_values: Optional[Sequence[Union[int, float, str]]] = None,
     ):
         """Value specification for a parameter in ``CATEGORICAL`` type.
 
         Args:
             values (Sequence[str]):
                 Required. The list of possible categories.
+            conditional_parameter_spec (Dict[str, _ParameterSpec]):
+                Optional. The conditional parameters associated with the object. The dictionary key
+                is the ID of the conditional parameter and the dictionary value is one of `DoubleParameterSpec`,
+                `IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`.
+            parent_values (Sequence[str]):
+                Optional. This argument is only needed when the object is a conditional parameter
+                and specifies the parent parameter's values for which the condition applies.
         """
-
-        super().__init__()
+        super().__init__(
+            conditional_parameter_spec=conditional_parameter_spec,
+            parent_values=parent_values,
+        )
 
         self.values = values
@@ -202,6 +257,8 @@ def __init__(
         self,
         values: Sequence[float],
         scale: str,
+        conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
+        parent_values: Optional[Sequence[Union[int, float, str]]] = None,
     ):
         """Value specification for a parameter in ``DISCRETE`` type.
 
@@ -216,9 +273,18 @@
                 Required. The type of scaling that should be applied to this
                 parameter. Accepts: 'linear', 'log', 'reverse_log'
+            conditional_parameter_spec (Dict[str, _ParameterSpec]):
+                Optional. The conditional parameters associated with the object. The dictionary key
+                is the ID of the conditional parameter and the dictionary value is one of `DoubleParameterSpec`,
+                `IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`.
+            parent_values (Sequence[float]):
+                Optional. This argument is only needed when the object is a conditional parameter
+                and specifies the parent parameter's values for which the condition applies.
         """
-
-        super().__init__()
+        super().__init__(
+            conditional_parameter_spec=conditional_parameter_spec,
+            parent_values=parent_values,
+        )
 
         self.values = values
         self.scale = scale
diff --git a/tests/unit/aiplatform/test_hyperparameter_tuning_job.py b/tests/unit/aiplatform/test_hyperparameter_tuning_job.py
index 30a2ea40be..163eeb8300 100644
--- a/tests/unit/aiplatform/test_hyperparameter_tuning_job.py
+++ b/tests/unit/aiplatform/test_hyperparameter_tuning_job.py
@@ -76,6 +76,13 @@
 
 _TEST_LABELS = {"my_hp_key": "my_hp_value"}
 
+_TEST_CONDITIONAL_PARAMETER_DECAY = hpt.DoubleParameterSpec(
+    min=1e-07, max=1, scale="linear", parent_values=[32, 64]
+)
+_TEST_CONDITIONAL_PARAMETER_LR = hpt.DoubleParameterSpec(
+    min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
+)
+
 _TEST_BASE_HYPERPARAMETER_TUNING_JOB_PROTO = gca_hyperparameter_tuning_job_compat.HyperparameterTuningJob(
     display_name=_TEST_DISPLAY_NAME,
     study_spec=gca_study_compat.StudySpec(
@@ -109,8 +116,34 @@
                 parameter_id="batch_size",
                 scale_type=gca_study_compat.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
                 discrete_value_spec=gca_study_compat.StudySpec.ParameterSpec.DiscreteValueSpec(
-                    values=[16, 32]
+                    values=[4, 8, 16, 32, 64]
                 ),
+                conditional_parameter_specs=[
+                    gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec(
+                        parent_discrete_values=gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
+                            values=[32, 64]
+                        ),
+                        parameter_spec=gca_study_compat.StudySpec.ParameterSpec(
+                            double_value_spec=gca_study_compat.StudySpec.ParameterSpec.DoubleValueSpec(
+                                min_value=1e-07, max_value=1
+                            ),
+                            scale_type=gca_study_compat.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
+                            parameter_id="decay",
+                        ),
+                    ),
+                    gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec(
+                        parent_discrete_values=gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
+                            values=[4, 8, 16]
+                        ),
+                        parameter_spec=gca_study_compat.StudySpec.ParameterSpec(
+                            double_value_spec=gca_study_compat.StudySpec.ParameterSpec.DoubleValueSpec(
+                                min_value=1e-07, max_value=1
+                            ),
+                            scale_type=gca_study_compat.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
+                            parameter_id="learning_rate",
+                        ),
+                    ),
+                ],
             ),
         ],
         algorithm=gca_study_compat.StudySpec.Algorithm.RANDOM_SEARCH,
@@ -388,7 +421,12 @@ def test_create_hyperparameter_tuning_job(
                     values=["relu", "sigmoid", "elu", "selu", "tanh"]
                 ),
                 "batch_size": hpt.DiscreteParameterSpec(
-                    values=[16, 32], scale="linear"
+                    values=[4, 8, 16, 32, 64],
+                    scale="linear",
+                    conditional_parameter_spec={
+                        "decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
+                        "learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
+                    },
                 ),
             },
             parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -454,7 +492,12 @@ def test_create_hyperparameter_tuning_job_with_timeout(
                    values=["relu", "sigmoid", "elu", "selu", "tanh"]
                 ),
                 "batch_size": hpt.DiscreteParameterSpec(
-                    values=[16, 32], scale="linear"
+                    values=[4, 8, 16, 32, 64],
+                    scale="linear",
+                    conditional_parameter_spec={
+                        "decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
+                        "learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
+                    },
                 ),
             },
             parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -515,7 +558,12 @@ def test_run_hyperparameter_tuning_job_with_fail_raises(
                    values=["relu", "sigmoid", "elu", "selu", "tanh"]
                 ),
                 "batch_size": hpt.DiscreteParameterSpec(
-                    values=[16, 32], scale="linear"
+                    values=[4, 8, 16, 32, 64],
+                    scale="linear",
+                    conditional_parameter_spec={
+                        "decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
+                        "learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
+                    },
                 ),
             },
             parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -574,7 +622,12 @@ def test_run_hyperparameter_tuning_job_with_fail_at_creation(self):
                    values=["relu", "sigmoid", "elu", "selu", "tanh"]
                 ),
                 "batch_size": hpt.DiscreteParameterSpec(
-                    values=[16, 32], scale="linear"
+                    values=[4, 8, 16, 32, 64],
+                    scale="linear",
+                    conditional_parameter_spec={
+                        "decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
+                        "learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
+                    },
                 ),
             },
             parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -639,7 +692,12 @@ def test_hyperparameter_tuning_job_get_state_raises_without_run(self):
                    values=["relu", "sigmoid", "elu", "selu", "tanh"]
                 ),
                 "batch_size": hpt.DiscreteParameterSpec(
-                    values=[16, 32, 64], scale="linear"
+                    values=[4, 8, 16, 32, 64],
+                    scale="linear",
+                    conditional_parameter_spec={
+                        "decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
+                        "learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
+                    },
                 ),
             },
             parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -697,7 +755,12 @@ def test_create_hyperparameter_tuning_job_with_tensorboard(
                    values=["relu", "sigmoid", "elu", "selu", "tanh"]
                 ),
                 "batch_size": hpt.DiscreteParameterSpec(
-                    values=[16, 32], scale="linear"
+                    values=[4, 8, 16, 32, 64],
+                    scale="linear",
+                    conditional_parameter_spec={
+                        "decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
+                        "learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
+                    },
                 ),
             },
             parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -769,7 +832,12 @@ def test_create_hyperparameter_tuning_job_with_enable_web_access(
                    values=["relu", "sigmoid", "elu", "selu", "tanh"]
                 ),
                 "batch_size": hpt.DiscreteParameterSpec(
-                    values=[16, 32], scale="linear"
+                    values=[4, 8, 16, 32, 64],
+                    scale="linear",
+                    conditional_parameter_spec={
+                        "decay": _TEST_CONDITIONAL_PARAMETER_DECAY,
+                        "learning_rate": _TEST_CONDITIONAL_PARAMETER_LR,
+                    },
                 ),
             },
             parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
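
For reviewers, here is a minimal usage sketch of how the new `conditional_parameter_spec` and `parent_values` arguments fit together end to end, mirroring the unit-test fixtures above. The custom job variable, display name, and trial counts are illustrative placeholders, not part of this change:

```python
from google.cloud import aiplatform
from google.cloud.aiplatform import hyperparameter_tuning as hpt

# Conditional children: "decay" is only searched when the parent
# "batch_size" draws 32 or 64; "learning_rate" only when it draws
# 4, 8, or 16 (matching the test fixtures in this PR).
decay = hpt.DoubleParameterSpec(
    min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
learning_rate = hpt.DoubleParameterSpec(
    min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)

job = aiplatform.HyperparameterTuningJob(
    display_name="my-tuning-job",  # placeholder
    custom_job=my_custom_job,  # assumed to be an existing aiplatform.CustomJob
    metric_spec={"loss": "minimize"},
    parameter_spec={
        "batch_size": hpt.DiscreteParameterSpec(
            values=[4, 8, 16, 32, 64],
            scale="linear",
            conditional_parameter_spec={
                "decay": decay,
                "learning_rate": learning_rate,
            },
        ),
    },
    max_trial_count=128,  # placeholder
    parallel_trial_count=8,  # placeholder
)
job.run()
```

Each entry in `conditional_parameter_spec` is emitted as a `ConditionalParameterSpec` on the parent's proto, with the condition type (`parent_int_values`, `parent_categorical_values`, or `parent_discrete_values`) selected from the parent's own value-spec key, as implemented in `_to_parameter_spec` above.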