Add unified interface test utils #19

Merged: 7 commits, Sep 29, 2020
Changes from 3 commits
11 changes: 1 addition & 10 deletions test/likelihoods/bernoulli.jl
@@ -1,13 +1,4 @@
 @testset "BernoulliLikelihood" begin
-    rng = MersenneTwister(123)
-    gp = GP(SqExponentialKernel())
-    x = rand(rng, 10)
-    y = rand(rng, 10)
     lik = BernoulliLikelihood()
-    lgp = LatentGP(gp, lik, 1e-5)
-    lfgp = lgp(x)
-
-    @test typeof(lik(rand(rng, lfgp.fx))) <: Distribution
-    @test length(rand(rng, lik(rand(rng, lfgp.fx)))) == 10
-    @test Functors.functor(lik)[1] == ()
+    test_interface(lik, SqExponentialKernel(), rand(10))
 end
22 changes: 4 additions & 18 deletions test/likelihoods/categorical.jl
@@ -1,22 +1,8 @@
 @testset "CategoricalLikelihood" begin
-    rng = MersenneTwister(123)
-    gp = GP(IndependentMOKernel(SqExponentialKernel()))
+    lik = CategoricalLikelihood()
     IN_DIM = 3
-    OUT_DIM = 4
+    OUT_DIM = 2 # one for the mean the other for the log-standard deviation
     N = 10
-    x = [rand(rng, IN_DIM) for _=1:N]
-    X = MOInput(x, OUT_DIM)
-    lik = CategoricalLikelihood()
-    lgp = LatentGP(gp, lik, 1e-5)
-    lfgp = lgp(X)
-
-    Y = rand(rng, lfgp.fx)
-
-    y = [Y[[i + j*N for j in 0:(OUT_DIM - 1)]] for i in 1:N]
-    # Replace with mo_inverse_transform once it is merged
-
-    @test length(lik(rand(3)).p) == 4
-    @test lik(y) isa Distribution
-    @test length(rand(rng, lik(y))) == 10
-    @test Functors.functor(lik)[1] == ()
+    X = MOInput([rand(IN_DIM) for _ in 1:N], OUT_DIM)
+    test_interface(lik, IndependentMOKernel(SqExponentialKernel()), X)
 end
30 changes: 4 additions & 26 deletions test/likelihoods/gaussian.jl
@@ -1,35 +1,13 @@
 @testset "GaussianLikelihood" begin
-    rng = MersenneTwister(123)
-    gp = GP(SqExponentialKernel())
-    x = rand(rng, 10)
-    y = rand(rng, 10)
     lik = GaussianLikelihood(1e-5)
-    lgp = LatentGP(gp, lik, 1e-5)
-    lfgp = lgp(x)
-
-    @test lik(rand(rng, lfgp.fx)) isa Distribution
-    @test length(rand(rng, lik(rand(rng, lfgp.fx)))) == 10
-    @test keys(Functors.functor(lik)[1]) == (:σ²,)
+    test_interface(lik, SqExponentialKernel(), rand(10); functor_args=(:σ²,))
 end

 @testset "HeteroscedasticGaussianLikelihood" begin
-    rng = MersenneTwister(123)
-    gp = GP(IndependentMOKernel(SqExponentialKernel()))
+    lik = HeteroscedasticGaussianLikelihood()
     IN_DIM = 3
     OUT_DIM = 2 # one for the mean the other for the log-standard deviation
     N = 10
-    x = [rand(rng, IN_DIM) for _ in 1:N]
-    X = MOInput(x, OUT_DIM)
-    lik = HeteroscedasticGaussianLikelihood()
-    lgp = LatentGP(gp, lik, 1e-5)
-    lfgp = lgp(X)
-
-    Y = rand(rng, lfgp.fx)
-
-    y = [Y[[i + j*N for j in 0:(OUT_DIM - 1)]] for i in 1:N]
-    # Replace with mo_inverse_transform once it is merged
-
-    @test lik(y) isa Distribution
-    @test length(rand(rng, lik(y))) == 10
-    @test Functors.functor(lik)[1] == ()
+    X = MOInput([rand(IN_DIM) for _ in 1:N], OUT_DIM)
+    test_interface(lik, IndependentMOKernel(SqExponentialKernel()), X)
 end
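The `functor_args=(:σ²,)` keyword passed above mirrors the old assertion that `Functors.functor` exposes the likelihood's noise variance as its only trainable field. A minimal standalone sketch of that check (assuming GPLikelihoods and Functors are loaded, as they are in the test suite):

    using Functors, GPLikelihoods

    lik = GaussianLikelihood(1e-5)
    params, _ = Functors.functor(lik)  # (NamedTuple of trainable fields, reconstruction function)
    @assert keys(params) == (:σ²,)     # the check test_interface performs when functor_args=(:σ²,)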
11 changes: 1 addition & 10 deletions test/likelihoods/poisson.jl
@@ -1,13 +1,4 @@
 @testset "PoissonLikelihood" begin
-    rng = MersenneTwister(123)
-    gp = GP(SqExponentialKernel())
-    x = rand(rng, 10)
-    y = rand(rng, 10)
     lik = PoissonLikelihood()
-    lgp = LatentGP(gp, lik, 1e-5)
-    lfgp = lgp(x)
-
-    @test lik(rand(rng, lfgp.fx)) isa Distribution
-    @test length(rand(rng, lik(rand(rng, lfgp.fx)))) == 10
-    @test Functors.functor(lik)[1] == ()
+    test_interface(lik, SqExponentialKernel(), rand(10))
 end
2 changes: 2 additions & 0 deletions test/runtests.jl
@@ -7,6 +7,8 @@ using Distributions

 @testset "GPLikelihoods.jl" begin

+    include("test_utils.jl")
+
     @testset "likelihoods" begin
         include("likelihoods/bernoulli.jl")
         include("likelihoods/categorical.jl")
60 changes: 60 additions & 0 deletions test/test_utils.jl
@@ -0,0 +1,60 @@
+function test_interface(
+    rng::AbstractRNG,
+    lik,
+    k::KernelFunctions.Kernel,
+    x::AbstractVector;
+    functor_args=(),
+)
+    gp = GP(k)
+    lgp = LatentGP(gp, lik, 1e-5)
+    lfgp = lgp(x)
+
+    # Check if likelihood produces a distribution
+    @test lik(rand(rng, lfgp.fx)) isa Distribution
+
+    N = length(x)
+    y = rand(rng, lfgp.fx)
+
+    if x isa MOInput
+        # TODO: replace with mo_inverse_transform
+        N = length(x.x)
+        y = [y[[i + j*N for j in 0:(x.out_dim - 1)]] for i in 1:N]
+    end
+
+    # Check if the likelihood samples are of correct length
+    @test length(rand(rng, lik(y))) == N
+
+    # Check if functor works properly
+    if functor_args == ()
+        @test Functors.functor(lik)[1] == functor_args
+    else
+        @test keys(Functors.functor(lik)[1]) == functor_args
+    end
+end
+
"""
test_interface(
lik,
k::Kernel,
x::AbstractVector;
functor_args=(),
)
sharanry marked this conversation as resolved.
Show resolved Hide resolved

This function provides unified method to check the interface of the various likelihoods
sharanry marked this conversation as resolved.
Show resolved Hide resolved
defined. It checks if the likelihood produces a distribution, length of likelihood
samples is correct and if the functor works as intended.
"""
+function test_interface(
+    lik,
+    k::KernelFunctions.Kernel,
+    x::AbstractVector;
+    kwargs...
+)
+    test_interface(
+        Random.GLOBAL_RNG,
+        lik,
+        k,
+        x;
+        kwargs...
+    )
+end
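For context on the `MOInput` branch in `test_interface`: a sample drawn from `lfgp.fx` is a flat vector with one block per output, and the `i + j*N` indexing regroups it into one length-`out_dim` vector per input point (the reshaping the TODO wants to replace with `mo_inverse_transform`). A minimal standalone sketch with hypothetical sizes:

    # Hypothetical sizes, plain vectors only; no GP or likelihood involved.
    N, out_dim = 3, 2
    Y = collect(1:(N * out_dim))  # stand-in for rand(rng, lfgp.fx): output blocks Y[1:N], Y[N+1:2N]
    y = [Y[[i + j * N for j in 0:(out_dim - 1)]] for i in 1:N]
    @assert y == [[1, 4], [2, 5], [3, 6]]  # one vector per input point, one entry per output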