
feat: added support for conditional parameters in hyperparameter tuning #1544

Merged Aug 2, 2022 (21 commits)
Changes from 14 of the 21 commits.

Commits
0689785
feat: added support for conditional parameters in hyperparameter tuning
rosiezou Jul 27, 2022
5727896
Merge branch 'main' into conditional-hyper-param
rosiezou Jul 27, 2022
bbfa1cb
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 27, 2022
a76c08f
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 27, 2022
c0ec8b3
Merge branch 'conditional-hyper-param' of https://github.com/googleap…
gcf-owl-bot[bot] Jul 27, 2022
f0c7799
Merge branch 'main' into conditional-hyper-param
rosiezou Jul 28, 2022
1c6ad68
fixing unit tests
rosiezou Jul 28, 2022
f8b7697
Merge branch 'main' into conditional-hyper-param
rosiezou Jul 28, 2022
c9f2ea8
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 28, 2022
193f106
Merge branch 'conditional-hyper-param' of https://github.com/googleap…
gcf-owl-bot[bot] Jul 28, 2022
727c6a0
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Jul 28, 2022
f3daefe
Merge branch 'conditional-hyper-param' of https://github.com/googleap…
gcf-owl-bot[bot] Jul 28, 2022
1f14caf
fixed all failing tests
rosiezou Jul 30, 2022
f39b33d
Merge branch 'main' into conditional-hyper-param
rosiezou Aug 1, 2022
91224f0
addressed PR comments
rosiezou Aug 1, 2022
88309fd
Merge branch 'main' into conditional-hyper-param
rosiezou Aug 1, 2022
40f0218
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Aug 1, 2022
23828bb
Merge branch 'conditional-hyper-param' of https://github.com/googleap…
gcf-owl-bot[bot] Aug 1, 2022
80dce03
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Aug 1, 2022
5715056
Merge branch 'conditional-hyper-param' of https://github.com/googleap…
gcf-owl-bot[bot] Aug 1, 2022
465db7d
Merge branch 'main' into conditional-hyper-param
rosiezou Aug 2, 2022
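
For orientation, here is a minimal end-to-end sketch of the feature this PR adds, assembled from the unit tests in the diff below. The custom_job object, display name, metric name, and trial counts are illustrative assumptions, not values fixed by this PR.

from google.cloud import aiplatform
from google.cloud.aiplatform import hyperparameter_tuning as hpt

# Child parameters: each is only sampled when the parent parameter
# ("batch_size" here) takes one of the values in `parent_values`.
conditional_parameter_decay = hpt.DoubleParameterSpec(
    min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
conditional_parameter_lr = hpt.DoubleParameterSpec(
    min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)

job = aiplatform.HyperparameterTuningJob(
    display_name="conditional-hparam-example",  # hypothetical
    custom_job=custom_job,  # assumed: an existing aiplatform.CustomJob
    metric_spec={"loss": "minimize"},  # hypothetical metric
    parameter_spec={
        "batch_size": hpt.DiscreteParameterSpec(
            values=[4, 8, 16, 32, 64],
            scale="linear",
            # Dictionary keys become the conditional parameters' IDs.
            conditional_parameter_spec={
                "decay": conditional_parameter_decay,
                "learning_rate": conditional_parameter_lr,
            },
        ),
    },
    max_trial_count=128,  # hypothetical
    parallel_trial_count=8,  # hypothetical
)
job.run()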
82 changes: 74 additions & 8 deletions google/cloud/aiplatform/hyperparameter_tuning.py
@@ -29,6 +29,10 @@
"unspecified": gca_study_compat.StudySpec.ParameterSpec.ScaleType.SCALE_TYPE_UNSPECIFIED,
}

_INT_VALUE_SPEC = "integer_value_spec"
_DISCRETE_VALUE_SPEC = "discrete_value_spec"
_CATEGORICAL_VALUE_SPEC = "categorical_value_spec"


class _ParameterSpec(metaclass=abc.ABCMeta):
"""Base class represents a single parameter to optimize."""
@@ -77,10 +81,30 @@ def _to_parameter_spec(
self, parameter_id: str
) -> gca_study_compat.StudySpec.ParameterSpec:
"""Converts this parameter to ParameterSpec."""
# TODO: Conditional parameters
conditions = []
if self.conditional_parameter_spec is not None:
for (conditional_param_id, spec) in self.conditional_parameter_spec.items():
condition = (
gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec()
)
if self._parameter_spec_value_key == _INT_VALUE_SPEC:
condition.parent_int_values = gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.IntValueCondition(
values=spec.parent_values
)
elif self._parameter_spec_value_key == _CATEGORICAL_VALUE_SPEC:
condition.parent_categorical_values = gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.CategoricalValueCondition(
values=spec.parent_values
)
elif self._parameter_spec_value_key == _DISCRETE_VALUE_SPEC:
condition.parent_discrete_values = gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
values=spec.parent_values
)
condition.parameter_spec = spec._to_parameter_spec(conditional_param_id)
conditions.append(condition)
parameter_spec = gca_study_compat.StudySpec.ParameterSpec(
parameter_id=parameter_id,
scale_type=_SCALE_TYPE_MAP.get(getattr(self, "scale", "unspecified")),
conditional_parameter_specs=conditions,
)

setattr(
@@ -105,6 +129,8 @@ def __init__(
min: float,
max: float,
scale: str,
conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
parent_values: Optional[Sequence[Union[int, float, str]]] = None,
):
"""
Value specification for a parameter in ``DOUBLE`` type.
@@ -120,9 +146,16 @@
Required. The type of scaling that should be applied to this parameter.

Accepts: 'linear', 'log', 'reverse_log'
conditional_parameter_spec (Dict[str, _ParameterSpec]):
Optional. The conditional parameters associated with the object. The dictionary key
is the ID of the conditional parameter and the dictionary value is one of
`DoubleParameterSpec`, `IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`.
parent_values (Sequence[Union[int, float, str]]):
Optional. This argument is only needed when the object is a conditional parameter
and specifies the parent parameter's values for which the condition applies.
"""

super().__init__()
super().__init__(conditional_parameter_spec, parent_values)

self.min = min
self.max = max
@@ -142,6 +175,8 @@ def __init__(
min: int,
max: int,
scale: str,
conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
parent_values: Optional[Sequence[Union[int, float, str]]] = None,
):
"""
Value specification for a parameter in ``INTEGER`` type.
@@ -157,9 +192,18 @@
Required. The type of scaling that should be applied to this parameter.

Accepts: 'linear', 'log', 'reverse_log'
conditional_parameter_spec (Dict[str, _ParameterSpec]):
Optional. The conditional parameters associated with the object. The dictionary key
is the ID of the conditional parameter and the dictionary value is one of
`DoubleParameterSpec`, `IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`.
parent_values (Sequence[Union[int, float, str]]):
Optional. This argument is only needed when the object is a conditional parameter
and specifies the parent parameter's values for which the condition applies.
"""

super().__init__()
super().__init__(
conditional_parameter_spec=conditional_parameter_spec,
parent_values=parent_values,
)

self.min = min
self.max = max
@@ -177,15 +221,26 @@ class CategoricalParameterSpec(_ParameterSpec):
def __init__(
self,
values: Sequence[str],
conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
parent_values: Optional[Sequence[Union[int, float, str]]] = None,
):
"""Value specification for a parameter in ``CATEGORICAL`` type.

Args:
values (Sequence[str]):
Required. The list of possible categories.
conditional_parameter_spec (Dict[str, _ParameterSpec]):
Optional. The conditional parameters associated with the object. The dictionary key
is the ID of the conditional parameter and the dictionary value is one of
`DoubleParameterSpec`, `IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`.
parent_values (Sequence[Union[int, float, str]]):
Optional. This argument is only needed when the object is a conditional parameter
and specifies the parent parameter's values for which the condition applies.
"""

super().__init__()
super().__init__(
conditional_parameter_spec=conditional_parameter_spec,
parent_values=parent_values,
)

self.values = values

@@ -202,6 +257,8 @@ def __init__(
self,
values: Sequence[float],
scale: str,
conditional_parameter_spec: Optional[Dict[str, "_ParameterSpec"]] = None,
parent_values: Optional[Sequence[Union[int, float, str]]] = None,
):
"""Value specification for a parameter in ``DISCRETE`` type.

@@ -216,9 +273,18 @@
Required. The type of scaling that should be applied to this parameter.

Accepts: 'linear', 'log', 'reverse_log'
conditional_parameter_spec (Dict[str, _ParameterSpec]):
Optional. The conditional parameters associated with the object. The dictionary key
is the ID of the conditional parameter and the dictionary value is one of
`DoubleParameterSpec`, `IntegerParameterSpec`, `CategoricalParameterSpec`, or `DiscreteParameterSpec`.
parent_values (Sequence[Union[int, float, str]]):
Optional. This argument is only needed when the object is a conditional parameter
and specifies the parent parameter's values for which the condition applies.
"""

super().__init__()
super().__init__(
conditional_parameter_spec=conditional_parameter_spec,
parent_values=parent_values,
)

self.values = values
self.scale = scale
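
To make the conversion above concrete, the following hedged sketch shows the proto that _to_parameter_spec builds for a discrete parent with one conditional child; the expected values mirror the StudySpec protos in the test file below. _to_parameter_spec is a private helper and is called here only for illustration.

from google.cloud.aiplatform import hyperparameter_tuning as hpt

decay = hpt.DoubleParameterSpec(
    min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
batch_size = hpt.DiscreteParameterSpec(
    values=[4, 8, 16, 32, 64],
    scale="linear",
    conditional_parameter_spec={"decay": decay},
)

spec = batch_size._to_parameter_spec(parameter_id="batch_size")
# spec.discrete_value_spec.values                 -> [4, 8, 16, 32, 64]
# cond = spec.conditional_parameter_specs[0]
# cond.parent_discrete_values.values              -> [32, 64]
# cond.parameter_spec.parameter_id                -> "decay"
# cond.parameter_spec.double_value_spec.min_value -> 1e-07
# cond.parameter_spec.double_value_spec.max_value -> 1.0

Note that the condition branch is selected from the parent's value-spec key, so only integer, categorical, and discrete parents get a parent-values condition; a DoubleParameterSpec parent has no matching branch in the conversion code above.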
125 changes: 111 additions & 14 deletions tests/unit/aiplatform/test_hyperparameter_tuning_job.py
@@ -109,8 +109,34 @@
parameter_id="batch_size",
scale_type=gca_study_compat.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
discrete_value_spec=gca_study_compat.StudySpec.ParameterSpec.DiscreteValueSpec(
values=[16, 32]
values=[4, 8, 16, 32, 64]
),
conditional_parameter_specs=[
gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec(
parent_discrete_values=gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
values=[32, 64]
),
parameter_spec=gca_study_compat.StudySpec.ParameterSpec(
double_value_spec=gca_study_compat.StudySpec.ParameterSpec.DoubleValueSpec(
min_value=1e-07, max_value=1
),
scale_type=gca_study_compat.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
parameter_id="decay",
),
),
gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec(
parent_discrete_values=gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition(
values=[4, 8, 16]
),
parameter_spec=gca_study_compat.StudySpec.ParameterSpec(
double_value_spec=gca_study_compat.StudySpec.ParameterSpec.DoubleValueSpec(
min_value=1e-07, max_value=1
),
scale_type=gca_study_compat.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
parameter_id="learning_rate",
),
),
],
),
],
algorithm=gca_study_compat.StudySpec.Algorithm.RANDOM_SEARCH,
@@ -377,6 +403,12 @@ def test_create_hyperparameter_tuning_job(
base_output_dir=test_custom_job._TEST_BASE_OUTPUT_DIR,
)

conditional_parameter_decay = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
conditional_parameter_lr = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)
job = aiplatform.HyperparameterTuningJob(
display_name=_TEST_DISPLAY_NAME,
custom_job=custom_job,
@@ -388,7 +420,12 @@
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": conditional_parameter_decay,
"learning_rate": conditional_parameter_lr,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -442,7 +479,12 @@ def test_create_hyperparameter_tuning_job_with_timeout(
worker_pool_specs=test_custom_job._TEST_WORKER_POOL_SPEC,
base_output_dir=test_custom_job._TEST_BASE_OUTPUT_DIR,
)

conditional_parameter_decay = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
conditional_parameter_lr = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)
job = aiplatform.HyperparameterTuningJob(
display_name=_TEST_DISPLAY_NAME,
custom_job=custom_job,
@@ -454,7 +496,12 @@
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": conditional_parameter_decay,
"learning_rate": conditional_parameter_lr,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -503,7 +550,12 @@ def test_run_hyperparameter_tuning_job_with_fail_raises(
worker_pool_specs=test_custom_job._TEST_WORKER_POOL_SPEC,
base_output_dir=test_custom_job._TEST_BASE_OUTPUT_DIR,
)

conditional_parameter_decay = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
conditional_parameter_lr = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)
job = aiplatform.HyperparameterTuningJob(
display_name=_TEST_DISPLAY_NAME,
custom_job=custom_job,
@@ -515,7 +567,12 @@
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": conditional_parameter_decay,
"learning_rate": conditional_parameter_lr,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -562,7 +619,12 @@ def test_run_hyperparameter_tuning_job_with_fail_at_creation(self):
worker_pool_specs=test_custom_job._TEST_WORKER_POOL_SPEC,
base_output_dir=test_custom_job._TEST_BASE_OUTPUT_DIR,
)

conditional_parameter_decay = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
conditional_parameter_lr = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)
job = aiplatform.HyperparameterTuningJob(
display_name=_TEST_DISPLAY_NAME,
custom_job=custom_job,
@@ -574,7 +636,12 @@
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": conditional_parameter_decay,
"learning_rate": conditional_parameter_lr,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -627,7 +694,12 @@ def test_hyperparameter_tuning_job_get_state_raises_without_run(self):
worker_pool_specs=test_custom_job._TEST_WORKER_POOL_SPEC,
base_output_dir=test_custom_job._TEST_BASE_OUTPUT_DIR,
)

conditional_parameter_decay = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
conditional_parameter_lr = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)
job = aiplatform.HyperparameterTuningJob(
display_name=_TEST_DISPLAY_NAME,
custom_job=custom_job,
@@ -639,7 +711,12 @@
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32, 64], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": conditional_parameter_decay,
"learning_rate": conditional_parameter_lr,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -685,7 +762,12 @@ def test_create_hyperparameter_tuning_job_with_tensorboard(
worker_pool_specs=test_custom_job._TEST_WORKER_POOL_SPEC,
base_output_dir=test_custom_job._TEST_BASE_OUTPUT_DIR,
)

conditional_parameter_decay = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
conditional_parameter_lr = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)
job = aiplatform.HyperparameterTuningJob(
display_name=_TEST_DISPLAY_NAME,
custom_job=custom_job,
@@ -697,7 +779,12 @@
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": conditional_parameter_decay,
"learning_rate": conditional_parameter_lr,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,
@@ -757,7 +844,12 @@ def test_create_hyperparameter_tuning_job_with_enable_web_access(
worker_pool_specs=test_custom_job._TEST_WORKER_POOL_SPEC,
base_output_dir=test_custom_job._TEST_BASE_OUTPUT_DIR,
)

conditional_parameter_decay = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[32, 64]
)
conditional_parameter_lr = hpt.DoubleParameterSpec(
min=1e-07, max=1, scale="linear", parent_values=[4, 8, 16]
)
job = aiplatform.HyperparameterTuningJob(
display_name=_TEST_DISPLAY_NAME,
custom_job=custom_job,
@@ -769,7 +861,12 @@
values=["relu", "sigmoid", "elu", "selu", "tanh"]
),
"batch_size": hpt.DiscreteParameterSpec(
values=[16, 32], scale="linear"
values=[4, 8, 16, 32, 64],
scale="linear",
conditional_parameter_spec={
"decay": conditional_parameter_decay,
"learning_rate": conditional_parameter_lr,
},
),
},
parallel_trial_count=_TEST_PARALLEL_TRIAL_COUNT,