Skip to content

Commit

Permalink
z-score using only the training data
Browse files Browse the repository at this point in the history
  • Loading branch information
michaeldeistler committed Jan 22, 2021
1 parent 39d6954 commit ab42a3c
Show file tree
Hide file tree
Showing 5 changed files with 43 additions and 36 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# v0.14.3

- Fixup for conditional correlation matrix (thanks @JBeckUniTb, #404)
- z-score data using only the training data (#411)


# v0.14.2
Expand Down
26 changes: 14 additions & 12 deletions sbi/inference/snle/snle_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,18 +148,6 @@ def train(
self._round = max(self._data_round_index)
theta, x, _ = self.get_simulations(self._round, exclude_invalid_x, False)

# First round or if retraining from scratch:
# Call the `self._build_neural_net` with the rounds' thetas and xs as
# arguments, which will build the neural network
# This is passed into NeuralPosterior, to create a neural posterior which
# can `sample()` and `log_prob()`. The network is accessible via `.net`.
if self._neural_net is None or retrain_from_scratch_each_round:
self._neural_net = self._build_neural_net(theta, x)
self._x_shape = x_shape_from_simulation(x)
assert (
len(self._x_shape) < 3
), "SNLE cannot handle multi-dimensional simulator output."

# Starting index for the training set (1 = discard round-0 samples).
start_idx = int(discard_prior_samples and self._round > 0)
theta, x, _ = self.get_simulations(start_idx, exclude_invalid_x)
Expand Down Expand Up @@ -194,6 +182,20 @@ def train(
sampler=SubsetRandomSampler(val_indices),
)

# First round or if retraining from scratch:
# Call the `self._build_neural_net` with the rounds' thetas and xs as
# arguments, which will build the neural network
# This is passed into NeuralPosterior, to create a neural posterior which
# can `sample()` and `log_prob()`. The network is accessible via `.net`.
if self._neural_net is None or retrain_from_scratch_each_round:
self._neural_net = self._build_neural_net(
theta[train_indices], x[train_indices]
)
self._x_shape = x_shape_from_simulation(x)
assert (
len(self._x_shape) < 3
), "SNLE cannot handle multi-dimensional simulator output."

self._neural_net.to(self._device)
optimizer = optim.Adam(list(self._neural_net.parameters()), lr=learning_rate,)

Expand Down
22 changes: 12 additions & 10 deletions sbi/inference/snpe/snpe_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,16 +189,6 @@ def train(
# Load data from most recent round.
theta, x, _ = self.get_simulations(self._round, exclude_invalid_x, False)

# First round or if retraining from scratch:
# Call the `self._build_neural_net` with the rounds' thetas and xs as
# arguments, which will build the neural network.
# This is passed into NeuralPosterior, to create a neural posterior which
# can `sample()` and `log_prob()`. The network is accessible via `.net`.
if self._neural_net is None or retrain_from_scratch_each_round:
self._neural_net = self._build_neural_net(theta, x)
test_posterior_net_for_multi_d_x(self._neural_net, theta, x)
self._x_shape = x_shape_from_simulation(x)

# Starting index for the training set (1 = discard round-0 samples).
start_idx = int(discard_prior_samples and self._round > 0)

Expand Down Expand Up @@ -242,6 +232,18 @@ def train(
sampler=SubsetRandomSampler(val_indices),
)

# First round or if retraining from scratch:
# Call the `self._build_neural_net` with the rounds' thetas and xs as
# arguments, which will build the neural network.
# This is passed into NeuralPosterior, to create a neural posterior which
# can `sample()` and `log_prob()`. The network is accessible via `.net`.
if self._neural_net is None or retrain_from_scratch_each_round:
self._neural_net = self._build_neural_net(
theta[train_indices], x[train_indices]
)
test_posterior_net_for_multi_d_x(self._neural_net, theta, x)
self._x_shape = x_shape_from_simulation(x)

# Move entire net to device for training.
self._neural_net.to(self._device)
optimizer = optim.Adam(list(self._neural_net.parameters()), lr=learning_rate,)
Expand Down
28 changes: 15 additions & 13 deletions sbi/inference/snre/snre_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,19 +150,6 @@ def train(
self._round = max(self._data_round_index)
theta, x, _ = self.get_simulations(self._round, exclude_invalid_x, False)

# First round or if retraining from scratch:
# Call the `self._build_neural_net` with the rounds' thetas and xs as
# arguments, which will build the neural network
# This is passed into NeuralPosterior, to create a neural posterior which
# can `sample()` and `log_prob()`. The network is accessible via `.net`.
if self._neural_net is None or retrain_from_scratch_each_round:
self._neural_net = self._build_neural_net(theta, x)
self._x_shape = x_shape_from_simulation(x)
assert len(self._x_shape) < 3, (
"For now, SNRE cannot handle multi-dimensional simulator output, see "
"issue #360."
)

# Starting index for the training set (1 = discard round-0 samples).
start_idx = int(discard_prior_samples and self._round > 0)
theta, x, _ = self.get_simulations(start_idx, exclude_invalid_x)
Expand Down Expand Up @@ -203,6 +190,21 @@ def train(
sampler=SubsetRandomSampler(val_indices),
)

# First round or if retraining from scratch:
# Call the `self._build_neural_net` with the rounds' thetas and xs as
# arguments, which will build the neural network
# This is passed into NeuralPosterior, to create a neural posterior which
# can `sample()` and `log_prob()`. The network is accessible via `.net`.
if self._neural_net is None or retrain_from_scratch_each_round:
self._neural_net = self._build_neural_net(
theta[train_indices], x[train_indices]
)
self._x_shape = x_shape_from_simulation(x)
assert len(self._x_shape) < 3, (
"For now, SNRE cannot handle multi-dimensional simulator output, see "
"issue #360."
)

self._neural_net.to(self._device)
optimizer = optim.Adam(list(self._neural_net.parameters()), lr=learning_rate,)

Expand Down
2 changes: 1 addition & 1 deletion tests/inference_with_NaN_simulator_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ def linear_gaussian_nan(
num_rounds = 2

for r in range(num_rounds):
theta, x = simulate_for_sbi(simulator, proposals[-1], 500)
theta, x = simulate_for_sbi(simulator, proposals[-1], 550)
rejection_estimator.append_simulations(theta, x)
if r < num_rounds - 1:
_ = rejection_estimator.train()
Expand Down

0 comments on commit ab42a3c

Please sign in to comment.