From 26a7de371960a92a24f14f5da760e63438b566ca Mon Sep 17 00:00:00 2001
From: odunbar
Date: Mon, 26 Sep 2022 17:55:12 -0700
Subject: [PATCH] example to produce comparable figs

---
 examples/Emulator/GaussianProcess/plot_GP.jl       | 12 ++++++------
 .../Emulator/RandomFeature/optimize_and_plot_RF.jl |  6 +++---
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/examples/Emulator/GaussianProcess/plot_GP.jl b/examples/Emulator/GaussianProcess/plot_GP.jl
index 5a815969c..09c822158 100644
--- a/examples/Emulator/GaussianProcess/plot_GP.jl
+++ b/examples/Emulator/GaussianProcess/plot_GP.jl
@@ -68,9 +68,9 @@ if !isdir(output_directory)
 end
 
 #create the machine learning tools: Gaussian Process
-gppackage = GPJL()
+gppackage = SKLJL()
 pred_type = YType()
-gaussian_process = GaussianProcess(gppackage, noise_learn = true)
+gaussian_process = GaussianProcess(gppackage, noise_learn = false)
 
 # Generate training data (x-y pairs, where x ∈ ℝ ᵖ, y ∈ ℝ ᵈ)
 # x = [x1, x2]: inputs/predictors/features/parameters
@@ -92,7 +92,7 @@ gx[2, :] = g2x
 
 # Add noise η
 μ = zeros(d)
-Σ = 0.1 * [[0.8, 0.0] [0.0, 0.5]] # d x d
+Σ = 0.1 * [[0.8, 0.1] [0.1, 0.5]] # d x d
 noise_samples = rand(MvNormal(μ, Σ), n)
 # y = G(x) + η
 Y = gx .+ noise_samples
@@ -182,9 +182,9 @@ println("GP trained")
 
 # Plot mean and variance of the predicted observables y1 and y2
 # For this, we generate test points on a x1-x2 grid.
-n_pts = 50
-x1 = range(0.0, stop = 2 * π, length = n_pts)
-x2 = range(0.0, stop = 2 * π, length = n_pts)
+n_pts = 200
+x1 = range(0.0, stop = (4.0/5.0) * 2 * π, length = n_pts)
+x2 = range(0.0, stop = (4.0/5.0) * 2 * π, length = n_pts)
 X1, X2 = meshgrid(x1, x2)
 # Input for predict has to be of size N_samples x input_dim
 inputs = permutedims(hcat(X1[:], X2[:]), (2, 1))
diff --git a/examples/Emulator/RandomFeature/optimize_and_plot_RF.jl b/examples/Emulator/RandomFeature/optimize_and_plot_RF.jl
index ed9b088fb..5abd2792a 100644
--- a/examples/Emulator/RandomFeature/optimize_and_plot_RF.jl
+++ b/examples/Emulator/RandomFeature/optimize_and_plot_RF.jl
@@ -125,7 +125,7 @@ n_features = 200
 
 # hyperparameter prior
 μ_l = 5.0
-σ_l = 10.0
+σ_l = 5.0
 prior_lengthscale = constrained_gaussian("lengthscale", μ_l, σ_l, 0.0, Inf, repeats = p)
 
 srfi = ScalarRandomFeatureInterface(n_features,prior_lengthscale)
@@ -138,8 +138,8 @@ optimize_hyperparameters!(emulator) # although RF already optimized
 # Plot mean and variance of the predicted observables y1 and y2
 # For this, we generate test points on a x1-x2 grid.
 n_pts = 200
-x1 = range(0.0, stop = 2 * π, length = n_pts)
-x2 = range(0.0, stop = 2 * π, length = n_pts)
+x1 = range(0.0, stop = 4.0/5.0* 2 * π, length = n_pts)
+x2 = range(0.0, stop = 4.0/5.0* 2 * π, length = n_pts)
 X1, X2 = meshgrid(x1, x2)
 # Input for predict has to be of size N_samples x input_dim
 inputs = permutedims(hcat(X1[:], X2[:]), (2, 1))
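
Note: for readers reproducing the comparison outside the patch, the standalone sketch below collects the two settings the GP and RF examples now share, the correlated observational-noise covariance and the 200-point test grid over [0, (4/5)·2π]. It is illustrative code, not taken from either file: the training-set size `n` is an arbitrary assumption, and MvNormal comes from Distributions.jl.

    # Illustrative sketch of the shared settings (assumed `n`; not part of the diff)
    using Distributions

    d = 2                                    # output dimension, as in the examples
    n = 80                                   # illustrative number of training points
    μ = zeros(d)
    Σ = 0.1 * [[0.8, 0.1] [0.1, 0.5]]        # 2 x 2 covariance with off-diagonal coupling
    noise_samples = rand(MvNormal(μ, Σ), n)  # d x n matrix of correlated noise draws

    n_pts = 200                              # test-grid resolution per axis
    x1 = range(0.0, stop = (4.0 / 5.0) * 2 * π, length = n_pts)
    x2 = range(0.0, stop = (4.0 / 5.0) * 2 * π, length = n_pts)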