From 401469f1dd6c7d05bab0efe738a98391cb83e0c8 Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Fri, 17 May 2024 17:37:55 +0200 Subject: [PATCH 01/12] Fix operator application --- baybe/kernels/composite.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/baybe/kernels/composite.py b/baybe/kernels/composite.py index e9d62bb82..07a04c988 100644 --- a/baybe/kernels/composite.py +++ b/baybe/kernels/composite.py @@ -1,5 +1,5 @@ """Composite kernels (that is, kernels composed of other kernels).""" - +from functools import reduce from operator import add, mul from typing import Optional @@ -56,7 +56,7 @@ class AdditiveKernel(Kernel): def to_gpytorch(self, *args, **kwargs): # noqa: D102 # See base class. - return add(*(k.to_gpytorch(*args, **kwargs) for k in self.base_kernels)) + return reduce(add, (k.to_gpytorch(*args, **kwargs) for k in self.base_kernels)) @define(frozen=True) @@ -71,4 +71,4 @@ class ProductKernel(Kernel): def to_gpytorch(self, *args, **kwargs): # noqa: D102 # See base class. - return mul(*(k.to_gpytorch(*args, **kwargs) for k in self.base_kernels)) + return reduce(mul, (k.to_gpytorch(*args, **kwargs) for k in self.base_kernels)) From 203c6dcf40b39eb42067eb4488b6b1f889bbd4b0 Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Fri, 17 May 2024 12:28:44 +0200 Subject: [PATCH 02/12] Add kernels --- baybe/kernels/basic.py | 194 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 193 insertions(+), 1 deletion(-) diff --git a/baybe/kernels/basic.py b/baybe/kernels/basic.py index f4805e69a..fe9f86695 100644 --- a/baybe/kernels/basic.py +++ b/baybe/kernels/basic.py @@ -4,7 +4,7 @@ from attrs import define, field from attrs.converters import optional as optional_c -from attrs.validators import in_, instance_of +from attrs.validators import gt, in_, instance_of from attrs.validators import optional as optional_v from baybe.kernels.base import Kernel @@ -13,6 +13,62 @@ from baybe.utils.validation import finite_float +@define(frozen=True) +class CosineKernel(Kernel): + """A cosine kernel.""" + + period_length_prior: Optional[Prior] = field( + default=None, validator=optional_v(instance_of(Prior)) + ) + """An optional prior on the kernel periodic length.""" + + period_length_initial_value: Optional[float] = field( + default=None, converter=optional_c(float), validator=optional_v(finite_float) + ) + """An optional initial value for the kernel periodic length.""" + + def to_gpytorch(self, *args, **kwargs): # noqa: D102 + # See base class. + import torch + + from baybe.utils.torch import DTypeFloatTorch + + gpytorch_kernel = super().to_gpytorch(*args, **kwargs) + if (initial_value := self.period_length_initial_value) is not None: + gpytorch_kernel.period_length = torch.tensor( + initial_value, dtype=DTypeFloatTorch + ) + return gpytorch_kernel + + +@define(frozen=True) +class LinearKernel(Kernel): + """A linear kernel.""" + + variance_prior: Optional[Prior] = field( + default=None, validator=optional_v(instance_of(Prior)) + ) + """An optional prior on the variance parameter.""" + + variance_initial_value: Optional[float] = field( + default=None, converter=optional_c(float), validator=optional_v(finite_float) + ) + """An optional initial value for the variance parameter.""" + + def to_gpytorch(self, *args, **kwargs): # noqa: D102 + # See base class. 
+ import torch + + from baybe.utils.torch import DTypeFloatTorch + + gpytorch_kernel = super().to_gpytorch(*args, **kwargs) + if (initial_value := self.variance_initial_value) is not None: + gpytorch_kernel.variance = torch.tensor( + initial_value, dtype=DTypeFloatTorch + ) + return gpytorch_kernel + + @define(frozen=True) class MaternKernel(Kernel): """A Matern kernel using a smoothness parameter.""" @@ -34,3 +90,139 @@ class MaternKernel(Kernel): default=None, converter=optional_c(float), validator=optional_v(finite_float) ) """An optional initial value for the kernel lengthscale.""" + + +@define(frozen=True) +class PeriodicKernel(Kernel): + """A periodic kernel.""" + + lengthscale_prior: Optional[Prior] = field( + default=None, validator=optional_v(instance_of(Prior)) + ) + """An optional prior on the kernel lengthscale.""" + + lengthscale_initial_value: Optional[float] = field( + default=None, converter=optional_c(float), validator=optional_v(finite_float) + ) + """An optional initial value for the kernel lengthscale.""" + + period_length_prior: Optional[Prior] = field( + default=None, validator=optional_v(instance_of(Prior)) + ) + """An optional prior on the kernel periodic length.""" + + period_length_initial_value: Optional[float] = field( + default=None, converter=optional_c(float), validator=optional_v(finite_float) + ) + """An optional initial value for the kernel periodic length.""" + + def to_gpytorch(self, *args, **kwargs): # noqa: D102 + # See base class. + import torch + + from baybe.utils.torch import DTypeFloatTorch + + gpytorch_kernel = super().to_gpytorch(*args, **kwargs) + # lengthscale is handled by the base class + + if (initial_value := self.period_length_initial_value) is not None: + gpytorch_kernel.period_length = torch.tensor( + initial_value, dtype=DTypeFloatTorch + ) + return gpytorch_kernel + + +@define(frozen=True) +class PiecewisePolynomialKernel(Kernel): + """A piecewise polynomial kernel.""" + + q: float = field(converter=int, validator=in_([0, 1, 2, 3]), default=2) + """A smoothness parameter.""" + + lengthscale_prior: Optional[Prior] = field( + default=None, validator=optional_v(instance_of(Prior)) + ) + """An optional prior on the kernel lengthscale.""" + + lengthscale_initial_value: Optional[float] = field( + default=None, converter=optional_c(float), validator=optional_v(finite_float) + ) + """An optional initial value for the kernel lengthscale.""" + + +@define(frozen=True) +class PolynomialKernel(Kernel): + """A polynomial kernel.""" + + power: int = field(converter=int) + """The power of the polynomial term.""" + + offset_prior: Optional[Prior] = field( + default=None, validator=optional_v(instance_of(Prior)) + ) + """An optional prior on the kernel offset.""" + + offset_initial_value: Optional[float] = field( + default=None, converter=optional_c(float), validator=optional_v(finite_float) + ) + """An optional initial value for the kernel offset.""" + + def to_gpytorch(self, *args, **kwargs): # noqa: D102 + # See base class. 
+ import torch + + from baybe.utils.torch import DTypeFloatTorch + + gpytorch_kernel = super().to_gpytorch(*args, **kwargs) + + if (initial_value := self.offset_initial_value) is not None: + gpytorch_kernel.offset = torch.tensor(initial_value, dtype=DTypeFloatTorch) + return gpytorch_kernel + + +@define(frozen=True) +class RBFKernel(Kernel): + """A radial basis function (RBF) kernel.""" + + lengthscale_prior: Optional[Prior] = field( + default=None, validator=optional_v(instance_of(Prior)) + ) + """An optional prior on the kernel lengthscale.""" + + lengthscale_initial_value: Optional[float] = field( + default=None, converter=optional_c(float), validator=optional_v(finite_float) + ) + """An optional initial value for the kernel lengthscale.""" + + +@define(frozen=True) +class RFFKernel(Kernel): + """A random Fourier features (RFF) kernel.""" + + num_samples: int = field(converter=int, validator=gt(0)) + """The number of frequencies to draw.""" + + lengthscale_prior: Optional[Prior] = field( + default=None, validator=optional_v(instance_of(Prior)) + ) + """An optional prior on the kernel lengthscale.""" + + lengthscale_initial_value: Optional[float] = field( + default=None, converter=optional_c(float), validator=optional_v(finite_float) + ) + """An optional initial value for the kernel lengthscale.""" + + +@define(frozen=True) +class RQKernel(Kernel): + """A rational quadratic (RQ) kernel.""" + + lengthscale_prior: Optional[Prior] = field( + default=None, validator=optional_v(instance_of(Prior)) + ) + """An optional prior on the kernel lengthscale.""" + + lengthscale_initial_value: Optional[float] = field( + default=None, converter=optional_c(float), validator=optional_v(finite_float) + ) + """An optional initial value for the kernel lengthscale.""" From a8a50f0eb3c9bb353879200f3595fae1c4cdee42 Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Fri, 17 May 2024 12:29:09 +0200 Subject: [PATCH 03/12] Add hypothesis --- tests/hypothesis_strategies/kernels.py | 86 +++++++++++++++++++++++++- 1 file changed, 84 insertions(+), 2 deletions(-) diff --git a/tests/hypothesis_strategies/kernels.py b/tests/hypothesis_strategies/kernels.py index d47fb179c..e85a8c1e1 100644 --- a/tests/hypothesis_strategies/kernels.py +++ b/tests/hypothesis_strategies/kernels.py @@ -4,7 +4,17 @@ import hypothesis.strategies as st -from baybe.kernels.basic import MaternKernel +from baybe.kernels.basic import ( + CosineKernel, + LinearKernel, + MaternKernel, + PeriodicKernel, + PiecewisePolynomialKernel, + PolynomialKernel, + RBFKernel, + RFFKernel, + RQKernel, +) from baybe.kernels.composite import AdditiveKernel, ProductKernel, ScaleKernel from ..hypothesis_strategies.basic import finite_floats @@ -19,6 +29,20 @@ class KernelType(Enum): PRODUCT = "PRODUCT" +cosine_kernels = st.builds( + CosineKernel, + period_length_prior=st.one_of(st.none(), priors), + period_length_initial_value=st.one_of(st.none(), finite_floats()), +) +"""A strategy that generates Cosine kernels.""" + +linear_kernels = st.builds( + LinearKernel, + variance_prior=st.one_of(st.none(), priors), + variance_initial_value=st.one_of(st.none(), finite_floats()), +) +"""A strategy that generates linear kernels.""" + matern_kernels = st.builds( MaternKernel, nu=st.sampled_from((0.5, 1.5, 2.5)), @@ -27,8 +51,66 @@ class KernelType(Enum): ) """A strategy that generates Matern kernels.""" +periodic_kernels = st.builds( + PeriodicKernel, + lengthscale_prior=st.one_of(st.none(), priors), + lengthscale_initial_value=st.one_of(st.none(), finite_floats()), + 
period_length_prior=st.one_of(st.none(), priors), + period_length_initial_value=st.one_of(st.none(), finite_floats()), +) +"""A strategy that generates periodic kernels.""" + +piecewise_polynomial_kernels = st.builds( + PiecewisePolynomialKernel, + q=st.integers(min_value=0, max_value=3), + lengthscale_prior=st.one_of(st.none(), priors), + lengthscale_initial_value=st.one_of(st.none(), finite_floats()), +) +"""A strategy that generates piecewise polynomial kernels.""" -base_kernels = st.one_of([matern_kernels]) +polynomial_kernels = st.builds( + PolynomialKernel, + power=st.integers(), + offset_prior=st.one_of(st.none(), priors), + offset_initial_value=st.one_of(st.none(), finite_floats()), +) +"""A strategy that generates polynomial kernels.""" + +rbf_kernels = st.builds( + RBFKernel, + lengthscale_prior=st.one_of(st.none(), priors), + lengthscale_initial_value=st.one_of(st.none(), finite_floats()), +) +"""A strategy that generates radial basis function (RBF) kernels.""" + +rff_kernels = st.builds( + RFFKernel, + num_samples=st.integers(min_value=1), + lengthscale_prior=st.one_of(st.none(), priors), + lengthscale_initial_value=st.one_of(st.none(), finite_floats()), +) +"""A strategy that generates radial basis function (RBF) kernels.""" + +rq_kernels = st.builds( + RQKernel, + lengthscale_prior=st.one_of(st.none(), priors), + lengthscale_initial_value=st.one_of(st.none(), finite_floats()), +) +"""A strategy that generates rational quadratic (RQ) kernels.""" + +base_kernels = st.one_of( + [ + matern_kernels, # on top because it is the default for many use cases + linear_kernels, + rbf_kernels, + rq_kernels, + cosine_kernels, + rff_kernels, + piecewise_polynomial_kernels, + polynomial_kernels, + periodic_kernels, + ] +) """A strategy that generates base kernels to be used within more complex kernels.""" From 6a27931ad1db01bc8289fa65b309d0de0d566c2e Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Fri, 17 May 2024 12:29:16 +0200 Subject: [PATCH 04/12] Add iteration tests --- tests/test_iterations.py | 33 +++++++++++++++++++++++++++++---- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/tests/test_iterations.py b/tests/test_iterations.py index 094cb4833..ad5ed829d 100644 --- a/tests/test_iterations.py +++ b/tests/test_iterations.py @@ -4,7 +4,17 @@ import pytest from baybe.acquisition.base import AcquisitionFunction -from baybe.kernels.basic import MaternKernel +from baybe.kernels.base import Kernel +from baybe.kernels.basic import ( + LinearKernel, + MaternKernel, + PeriodicKernel, + PiecewisePolynomialKernel, + PolynomialKernel, + RBFKernel, + RFFKernel, + RQKernel, +) from baybe.kernels.composite import AdditiveKernel, ProductKernel, ScaleKernel from baybe.priors import ( GammaPrior, @@ -131,16 +141,31 @@ SmoothedBoxPrior(0, 3, 0.1), ] -valid_base_kernels = [MaternKernel(lengthscale_prior=prior) for prior in valid_priors] +valid_base_kernels: list[Kernel] = [ + cls(**arg_dict) + for prior in valid_priors + for cls, arg_dict in [ + (MaternKernel, {"lengthscale_prior": prior}), + (LinearKernel, {"variance_prior": prior}), + (PeriodicKernel, {"period_length_prior": prior}), + (PeriodicKernel, {"lengthscale_prior": prior}), + (PiecewisePolynomialKernel, {"lengthscale_prior": prior}), + (PolynomialKernel, {"offset_prior": prior, "power": 2}), + (RBFKernel, {"lengthscale_prior": prior}), + (RQKernel, {"lengthscale_prior": prior}), + (RFFKernel, {"lengthscale_prior": prior, "num_samples": 5}), + ] +] valid_scale_kernels = [ - ScaleKernel(base_kernel=base_kernel, 
outputscale_prior=prior) + ScaleKernel(base_kernel=base_kernel, outputscale_prior=HalfCauchyPrior(scale=1)) for base_kernel in valid_base_kernels - for prior in valid_priors ] valid_composite_kernels = [ AdditiveKernel([MaternKernel(1.5), MaternKernel(2.5)]), + AdditiveKernel([PolynomialKernel(1), PolynomialKernel(2), PolynomialKernel(3)]), + AdditiveKernel([RBFKernel(), RQKernel(), PolynomialKernel(1)]), ProductKernel([MaternKernel(1.5), MaternKernel(2.5)]), ] From b50d4dae02a780efc24730ae53a4cc4fb637769f Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Fri, 17 May 2024 17:58:55 +0200 Subject: [PATCH 05/12] Remove prior iteration tests --- tests/test_iterations.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/tests/test_iterations.py b/tests/test_iterations.py index ad5ed829d..7e3ff3949 100644 --- a/tests/test_iterations.py +++ b/tests/test_iterations.py @@ -203,15 +203,6 @@ def test_iter_nonmc_acquisition_function(campaign, n_iterations, batch_size): run_iterations(campaign, n_iterations, batch_size) -@pytest.mark.slow -@pytest.mark.parametrize( - "lengthscale_prior", valid_priors, ids=[c.__class__ for c in valid_priors] -) -@pytest.mark.parametrize("n_iterations", [3], ids=["i3"]) -def test_iter_prior(campaign, n_iterations, batch_size): - run_iterations(campaign, n_iterations, batch_size) - - @pytest.mark.slow @pytest.mark.parametrize( "kernel", valid_kernels, ids=[c.__class__ for c in valid_kernels] From d28ef32c9b7da38808857e7984e012db9ee3940c Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Fri, 17 May 2024 18:05:16 +0200 Subject: [PATCH 06/12] Update kernel __init__.py --- baybe/kernels/__init__.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/baybe/kernels/__init__.py b/baybe/kernels/__init__.py index 7eb009e2e..69a54e1bc 100644 --- a/baybe/kernels/__init__.py +++ b/baybe/kernels/__init__.py @@ -1,11 +1,29 @@ """Kernels for Gaussian process surrogate models.""" -from baybe.kernels.basic import MaternKernel +from baybe.kernels.basic import ( + CosineKernel, + LinearKernel, + MaternKernel, + PeriodicKernel, + PiecewisePolynomialKernel, + PolynomialKernel, + RBFKernel, + RFFKernel, + RQKernel, +) from baybe.kernels.composite import AdditiveKernel, ProductKernel, ScaleKernel __all__ = [ "AdditiveKernel", + "CosineKernel", + "LinearKernel", "MaternKernel", + "PeriodicKernel", + "PiecewisePolynomialKernel", + "PolynomialKernel", "ProductKernel", + "RBFKernel", + "RFFKernel", + "RQKernel", "ScaleKernel", ] From 1b8e46f1f244783c843249e50c432332bf02ce72 Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Tue, 21 May 2024 10:15:17 +0200 Subject: [PATCH 07/12] Add more composite kernel tests --- tests/test_iterations.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_iterations.py b/tests/test_iterations.py index 7e3ff3949..2105f76b1 100644 --- a/tests/test_iterations.py +++ b/tests/test_iterations.py @@ -167,6 +167,14 @@ AdditiveKernel([PolynomialKernel(1), PolynomialKernel(2), PolynomialKernel(3)]), AdditiveKernel([RBFKernel(), RQKernel(), PolynomialKernel(1)]), ProductKernel([MaternKernel(1.5), MaternKernel(2.5)]), + ProductKernel([RBFKernel(), RQKernel(), PolynomialKernel(1)]), + ProductKernel([PolynomialKernel(1), PolynomialKernel(2), PolynomialKernel(3)]), + AdditiveKernel( + [ + ProductKernel([MaternKernel(1.5), MaternKernel(2.5)]), + AdditiveKernel([MaternKernel(1.5), MaternKernel(2.5)]), + ] + ), ] valid_kernels = valid_base_kernels + valid_scale_kernels + valid_composite_kernels From 
1bd6fd0bf08ccef15941d68a187fb7988a492caa Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Tue, 21 May 2024 10:36:40 +0200 Subject: [PATCH 08/12] Fix text --- baybe/kernels/basic.py | 13 ++++++------- tests/hypothesis_strategies/kernels.py | 4 ++-- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/baybe/kernels/basic.py b/baybe/kernels/basic.py index fe9f86695..20617000a 100644 --- a/baybe/kernels/basic.py +++ b/baybe/kernels/basic.py @@ -20,12 +20,12 @@ class CosineKernel(Kernel): period_length_prior: Optional[Prior] = field( default=None, validator=optional_v(instance_of(Prior)) ) - """An optional prior on the kernel periodic length.""" + """An optional prior on the kernel period length.""" period_length_initial_value: Optional[float] = field( default=None, converter=optional_c(float), validator=optional_v(finite_float) ) - """An optional initial value for the kernel periodic length.""" + """An optional initial value for the kernel period length.""" def to_gpytorch(self, *args, **kwargs): # noqa: D102 # See base class. @@ -48,12 +48,12 @@ class LinearKernel(Kernel): variance_prior: Optional[Prior] = field( default=None, validator=optional_v(instance_of(Prior)) ) - """An optional prior on the variance parameter.""" + """An optional prior on the kernel variance parameter.""" variance_initial_value: Optional[float] = field( default=None, converter=optional_c(float), validator=optional_v(finite_float) ) - """An optional initial value for the variance parameter.""" + """An optional initial value for the kernel variance parameter.""" def to_gpytorch(self, *args, **kwargs): # noqa: D102 # See base class. @@ -109,12 +109,12 @@ class PeriodicKernel(Kernel): period_length_prior: Optional[Prior] = field( default=None, validator=optional_v(instance_of(Prior)) ) - """An optional prior on the kernel periodic length.""" + """An optional prior on the kernel period length.""" period_length_initial_value: Optional[float] = field( default=None, converter=optional_c(float), validator=optional_v(finite_float) ) - """An optional initial value for the kernel periodic length.""" + """An optional initial value for the kernel period length.""" def to_gpytorch(self, *args, **kwargs): # noqa: D102 # See base class. 
@@ -174,7 +174,6 @@ def to_gpytorch(self, *args, **kwargs): # noqa: D102 from baybe.utils.torch import DTypeFloatTorch gpytorch_kernel = super().to_gpytorch(*args, **kwargs) - if (initial_value := self.offset_initial_value) is not None: gpytorch_kernel.offset = torch.tensor(initial_value, dtype=DTypeFloatTorch) return gpytorch_kernel diff --git a/tests/hypothesis_strategies/kernels.py b/tests/hypothesis_strategies/kernels.py index e85a8c1e1..95ca9b38a 100644 --- a/tests/hypothesis_strategies/kernels.py +++ b/tests/hypothesis_strategies/kernels.py @@ -34,7 +34,7 @@ class KernelType(Enum): period_length_prior=st.one_of(st.none(), priors), period_length_initial_value=st.one_of(st.none(), finite_floats()), ) -"""A strategy that generates Cosine kernels.""" +"""A strategy that generates cosine kernels.""" linear_kernels = st.builds( LinearKernel, @@ -89,7 +89,7 @@ class KernelType(Enum): lengthscale_prior=st.one_of(st.none(), priors), lengthscale_initial_value=st.one_of(st.none(), finite_floats()), ) -"""A strategy that generates radial basis function (RBF) kernels.""" +"""A strategy that generates random Fourier features (RFF) kernels.""" rq_kernels = st.builds( RQKernel, From cce7be5890ef22d35f4062456fe5da63d3a91f67 Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Tue, 21 May 2024 12:53:31 +0200 Subject: [PATCH 09/12] Improve integer specifications --- baybe/kernels/basic.py | 8 ++++---- tests/hypothesis_strategies/kernels.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/baybe/kernels/basic.py b/baybe/kernels/basic.py index 20617000a..985fa7361 100644 --- a/baybe/kernels/basic.py +++ b/baybe/kernels/basic.py @@ -4,7 +4,7 @@ from attrs import define, field from attrs.converters import optional as optional_c -from attrs.validators import gt, in_, instance_of +from attrs.validators import ge, in_, instance_of from attrs.validators import optional as optional_v from baybe.kernels.base import Kernel @@ -136,7 +136,7 @@ def to_gpytorch(self, *args, **kwargs): # noqa: D102 class PiecewisePolynomialKernel(Kernel): """A piecewise polynomial kernel.""" - q: float = field(converter=int, validator=in_([0, 1, 2, 3]), default=2) + q: int = field(validator=in_([0, 1, 2, 3]), default=2) """A smoothness parameter.""" lengthscale_prior: Optional[Prior] = field( @@ -154,7 +154,7 @@ class PiecewisePolynomialKernel(Kernel): class PolynomialKernel(Kernel): """A polynomial kernel.""" - power: int = field(converter=int) + power: int = field(validator=[instance_of(int), ge(0)]) """The power of the polynomial term.""" offset_prior: Optional[Prior] = field( @@ -198,7 +198,7 @@ class RBFKernel(Kernel): class RFFKernel(Kernel): """A random Fourier features (RFF) kernel.""" - num_samples: int = field(converter=int, validator=gt(0)) + num_samples: int = field(validator=[instance_of(int), ge(1)]) """The number of frequencies to draw.""" lengthscale_prior: Optional[Prior] = field( diff --git a/tests/hypothesis_strategies/kernels.py b/tests/hypothesis_strategies/kernels.py index 95ca9b38a..960581223 100644 --- a/tests/hypothesis_strategies/kernels.py +++ b/tests/hypothesis_strategies/kernels.py @@ -70,7 +70,7 @@ class KernelType(Enum): polynomial_kernels = st.builds( PolynomialKernel, - power=st.integers(), + power=st.integers(min_value=0), offset_prior=st.one_of(st.none(), priors), offset_initial_value=st.one_of(st.none(), finite_floats()), ) From b476aeb05e0fd74ae538bf4e6c77b90c82c19f27 Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Tue, 21 May 2024 12:57:46 +0200 Subject: [PATCH 10/12] Add 
kernel and prior GPyTorch reference --- baybe/kernels/__init__.py | 6 +++++- baybe/priors/__init__.py | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/baybe/kernels/__init__.py b/baybe/kernels/__init__.py index 69a54e1bc..0cbbf72dc 100644 --- a/baybe/kernels/__init__.py +++ b/baybe/kernels/__init__.py @@ -1,4 +1,8 @@ -"""Kernels for Gaussian process surrogate models.""" +"""Kernels for Gaussian process surrogate models. + +The kernel classes mimic classes from GPyTorch. For details on specification and +arguments see https://docs.gpytorch.ai/en/stable/kernels.html. +""" from baybe.kernels.basic import ( CosineKernel, diff --git a/baybe/priors/__init__.py b/baybe/priors/__init__.py index b133c61f9..77f71ae67 100644 --- a/baybe/priors/__init__.py +++ b/baybe/priors/__init__.py @@ -1,4 +1,8 @@ -"""Prior distributions.""" +"""Prior distributions. + +The prior classes mimic classes from GPyTorch. For details on specification and +arguments see https://docs.gpytorch.ai/en/stable/priors.html. +""" from baybe.priors.basic import ( GammaPrior, From bd3d208fe2eaae28e35ce584c2fbe40618889d0a Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Tue, 21 May 2024 12:59:36 +0200 Subject: [PATCH 11/12] Remove cosine kernel --- baybe/kernels/__init__.py | 2 -- baybe/kernels/basic.py | 28 -------------------------- tests/hypothesis_strategies/kernels.py | 9 --------- 3 files changed, 39 deletions(-) diff --git a/baybe/kernels/__init__.py b/baybe/kernels/__init__.py index 0cbbf72dc..9323a2b63 100644 --- a/baybe/kernels/__init__.py +++ b/baybe/kernels/__init__.py @@ -5,7 +5,6 @@ """ from baybe.kernels.basic import ( - CosineKernel, LinearKernel, MaternKernel, PeriodicKernel, @@ -19,7 +18,6 @@ __all__ = [ "AdditiveKernel", - "CosineKernel", "LinearKernel", "MaternKernel", "PeriodicKernel", diff --git a/baybe/kernels/basic.py b/baybe/kernels/basic.py index 985fa7361..4bfabaf04 100644 --- a/baybe/kernels/basic.py +++ b/baybe/kernels/basic.py @@ -13,34 +13,6 @@ from baybe.utils.validation import finite_float -@define(frozen=True) -class CosineKernel(Kernel): - """A cosine kernel.""" - - period_length_prior: Optional[Prior] = field( - default=None, validator=optional_v(instance_of(Prior)) - ) - """An optional prior on the kernel period length.""" - - period_length_initial_value: Optional[float] = field( - default=None, converter=optional_c(float), validator=optional_v(finite_float) - ) - """An optional initial value for the kernel period length.""" - - def to_gpytorch(self, *args, **kwargs): # noqa: D102 - # See base class. 
- import torch - - from baybe.utils.torch import DTypeFloatTorch - - gpytorch_kernel = super().to_gpytorch(*args, **kwargs) - if (initial_value := self.period_length_initial_value) is not None: - gpytorch_kernel.period_length = torch.tensor( - initial_value, dtype=DTypeFloatTorch - ) - return gpytorch_kernel - - @define(frozen=True) class LinearKernel(Kernel): """A linear kernel.""" diff --git a/tests/hypothesis_strategies/kernels.py b/tests/hypothesis_strategies/kernels.py index 960581223..479f6233f 100644 --- a/tests/hypothesis_strategies/kernels.py +++ b/tests/hypothesis_strategies/kernels.py @@ -5,7 +5,6 @@ import hypothesis.strategies as st from baybe.kernels.basic import ( - CosineKernel, LinearKernel, MaternKernel, PeriodicKernel, @@ -29,13 +28,6 @@ class KernelType(Enum): PRODUCT = "PRODUCT" -cosine_kernels = st.builds( - CosineKernel, - period_length_prior=st.one_of(st.none(), priors), - period_length_initial_value=st.one_of(st.none(), finite_floats()), -) -"""A strategy that generates cosine kernels.""" - linear_kernels = st.builds( LinearKernel, variance_prior=st.one_of(st.none(), priors), @@ -104,7 +96,6 @@ class KernelType(Enum): linear_kernels, rbf_kernels, rq_kernels, - cosine_kernels, rff_kernels, piecewise_polynomial_kernels, polynomial_kernels, From bc1bdd5ade26767f3aa49a23aca574081959134e Mon Sep 17 00:00:00 2001 From: Martin Fitzner Date: Fri, 17 May 2024 19:23:49 +0200 Subject: [PATCH 12/12] Update CHANGELOG.md --- CHANGELOG.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b2790450b..03dba78b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,8 +9,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `mypy` for search space and objectives - Class hierarchy for objectives - Deserialization is now also possible from optional class name abbreviations -- `Kernel`, `MaternKernel`, `AdditiveKernel`, `ProductKernel` and `ScaleKernel` - classes for specifying kernels +- `AdditiveKernel`, `LinearKernel`, `MaternKernel`, `PeriodicKernel`, + `PiecewisePolynomialKernel`, `PolynomialKernel`, `ProductKernel`, `RBFKernel`, + `RFFKernel`, `RQKernel`, `ScaleKernel` classes for specifying kernels +- `GammaPrior`, `HalfCauchyPrior`, `NormalPrior`, `HalfNormalPrior`, `LogNormalPrior` + and `SmoothedBoxPrior` classes for specifying priors - `KernelFactory` protocol enabling context-dependent construction of kernels - Preset mechanism for `GaussianProcessSurrogate` - `hypothesis` strategies and roundtrip test for kernels, constraints, objectives, @@ -18,8 +21,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - New acquisition functions: `qSR`, `qNEI`, `LogEI`, `qLogEI`, `qLogNEI` - Serialization user guide - Basic deserialization tests using different class type specifiers -- `GammaPrior`, `HalfCauchyPrior`, `NormalPrior`, `HalfNormalPrior`, `LogNormalPrior` - and `SmoothedBoxPrior` can now be chosen as lengthscale prior - Environment variables user guide - Utility for estimating memory requirements of discrete product search space
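
The patch series above only defines the kernel classes themselves. As a minimal usage sketch (not part of any commit above), the new kernels can be composed roughly as follows; the class and argument names are taken from the diffs, while the concrete prior parameters and initial values are illustrative placeholders.

from baybe.kernels import (
    AdditiveKernel,
    MaternKernel,
    ProductKernel,
    RBFKernel,
    ScaleKernel,
)
from baybe.priors import GammaPrior, HalfCauchyPrior

# A Matern kernel with a lengthscale prior and an initial lengthscale value.
matern = MaternKernel(
    nu=2.5,
    lengthscale_prior=GammaPrior(2.0, 0.15),  # illustrative prior parameters
    lengthscale_initial_value=0.2,  # illustrative initial value
)

# Base kernels can be combined into sums and products ...
combined = AdditiveKernel([ProductKernel([matern, RBFKernel()]), RBFKernel()])

# ... and wrapped with an output scale that carries its own prior.
kernel = ScaleKernel(
    base_kernel=combined,
    outputscale_prior=HalfCauchyPrior(scale=1),
)

# The frozen attrs objects are only translated to their GPyTorch counterparts
# on demand; this conversion is the step that the reduce(add, ...) /
# reduce(mul, ...) fix in the first patch applies to.
gpytorch_kernel = kernel.to_gpytorch()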