more refactoring, NRE also working now
michaeldeistler committed May 17, 2021
1 parent 61619bb commit 768d830
Showing 6 changed files with 250 additions and 232 deletions.
65 changes: 22 additions & 43 deletions sbi/inference/posteriors/direct_posterior.py
@@ -296,6 +296,7 @@ def sample(
`max_sampling_batch_size` to set the batch size for drawing new
samples from the candidate distribution, e.g., the posterior. A larger
batch size speeds up sampling.
Returns:
Samples from posterior.
"""
@@ -332,14 +333,31 @@ def sample(
)
)
if "proposal" not in rejection_sampling_parameters:
rejection_sampling_parameters["proposal"] = self.net
assert (
not self.net.training
), "Posterior nn must be in eval mode for sampling."

samples = self._sample_posterior_rejection(
num_samples=num_samples,
class ProposalDefaultX:
def __init__(self, posterior_nn: Any):
self.posterior_nn = posterior_nn

def sample(self, sample_shape, **kwargs):
return self.posterior_nn.sample(
sample_shape, context=x, **kwargs
)

def log_prob(self, theta, **kwargs):
xs = x.repeat(theta.shape[0], 1)
return self.posterior_nn.log_prob(theta, context=xs)

rejection_sampling_parameters["ignore_scaling"] = True
rejection_sampling_parameters["proposal"] = ProposalDefaultX(self.net)

samples, _ = rejection_sample(
potential_fn=potential_fn_provider(
self._prior, self.net, x, "rejection"
),
x=x,
num_samples=num_samples,
show_progress_bars=show_progress_bars,
**rejection_sampling_parameters,
)
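# --- Illustration (not part of the diff) --------------------------------------
# A minimal sketch of how this refactored path would be exercised from user
# code. `posterior`, `x_o`, and the keyword values are assumptions inferred
# from the surrounding diff, not a verbatim API of this commit.
import torch

x_o = torch.zeros(1, 3)  # assumed shape of the observation
theta = posterior.sample(
    sample_shape=(1_000,),
    x=x_o,
    rejection_sampling_parameters={"max_sampling_batch_size": 10_000},
)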
@@ -352,45 +370,6 @@ def sample(

return samples.reshape((*sample_shape, -1))

@torch.no_grad()
def _sample_posterior_rejection(
self,
num_samples: int,
potential_fn: nn.Module,
proposal: Any,
x: Tensor,
show_progress_bars: bool = False,
warn_acceptance: float = 0.01,
sample_for_correction_factor: bool = False,
max_sampling_batch_size: int = 10_000,
) -> Tensor:

assert not self.net.training, "Posterior nn must be in eval mode for sampling."

class ProposalDefaultX:
def __init__(self, posterior_nn: Any):
self.posterior_nn = posterior_nn

def sample(self, sample_shape, **kwargs):
return self.posterior_nn.sample(sample_shape, context=x, **kwargs)

def log_prob(self, theta, **kwargs):
xs = x.repeat(theta.shape[0], 1)
return self.posterior_nn.log_prob(theta, context=xs)

proposal = ProposalDefaultX(proposal)

samples, _ = rejection_sample(
potential_fn=potential_fn,
proposal=proposal,
num_samples=num_samples,
show_progress_bars=show_progress_bars,
warn_acceptance=warn_acceptance,
sample_for_correction_factor=sample_for_correction_factor,
max_sampling_batch_size=max_sampling_batch_size,
)
return samples

def sample_conditional(
self,
sample_shape: Shape,
99 changes: 5 additions & 94 deletions sbi/inference/posteriors/likelihood_based_posterior.py
@@ -61,8 +61,8 @@ def __init__(
will draw init locations from prior, whereas `sir` will use Sequential-
Importance-Resampling using `init_strategy_num_candidates` to find init
locations.
rejection_sampling_parameters: Dictionary overriding the default parameters
    for rejection sampling. The following parameters are supported:
    `proposal`, as the proposal distribution. `num_samples_to_find_max`
    as the number of samples that are used to find the maximum of the
    `potential_fn / proposal` ratio. `m` as multiplier to that ratio.
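# --- Illustration (assumed values) ---------------------------------------------
# Building such an override dictionary. Values are illustrative, not defaults
# verified against this commit; `prior` is assumed to be a torch Distribution.
rejection_sampling_parameters = {
    "proposal": prior,                  # draw candidates from the prior
    "num_samples_to_find_max": 10_000,  # draws used to estimate the max ratio
    "m": 1.2,                           # safety multiplier; m < 1.0 is invalid
}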
Expand Down Expand Up @@ -197,11 +197,11 @@ def sample(
if "proposal" not in rejection_sampling_parameters:
rejection_sampling_parameters["proposal"] = self._prior

samples = self._sample_posterior_rejection(
num_samples=num_samples,
samples, _ = rejection_sample(
potential_fn=potential_fn_provider(
self._prior, self.net, x, "rejection"
),
num_samples=num_samples,
**rejection_sampling_parameters,
)
else:
@@ -213,95 +213,6 @@ def sample(

return samples.reshape((*sample_shape, -1))

def _sample_posterior_rejection(
self,
num_samples: int,
potential_fn: Any,
proposal: Any,
num_samples_to_find_max: int = 10_000,
m: float = 1.2,
sampling_batch_size: int = 10_000,
):
r"""
Return samples from a distribution via rejection sampling.
This function is always used by SNLE and SNRE, and can also be used by SNPE
to deal with strong leakage. Depending on the inference method, a different
potential function for the rejection sampler is required.
Args:
num_samples: Desired number of samples.
potential_fn: Potential function used for rejection sampling.
proposal: Proposal distribution for rejection sampling.
num_samples_to_find_max: Number of samples that are used to find the maximum
of the `potential_fn / proposal` ratio.
m: Multiplier to the maximum ratio between potential function and the
proposal. A higher value will ensure that the samples are indeed from
the posterior, but will increase the rejection ratio and thus
computation time.
sampling_batch_size: Batch size of the samples drawn from the proposal
    at every iteration.
Returns:
Tensor of shape (num_samples, shape_of_single_theta).
"""

find_max = proposal.sample((num_samples_to_find_max,))

# Define a potential as the ratio between target distribution and proposal.
def potential_over_proposal(theta):
return torch.squeeze(potential_fn(theta)) - proposal.log_prob(theta)

# Search for the maximum of the ratio.
_, max_log_ratio = optimize_potential_fn(
potential_fn=potential_over_proposal,
inits=find_max,
dist_specifying_bounds=proposal,
num_iter=100,
learning_rate=0.01,
num_to_optimize=max(1, int(num_samples_to_find_max / 10)),
show_progress_bars=False,
)

if m < 1.0:
warn("A value of m < 1.0 will lead to systematically wrong results.")

class ScaledProposal:
def __init__(self, proposal: Any, max_log_ratio: float, log_m: float):
self.proposal = proposal
self.max_log_ratio = max_log_ratio
self.log_m = log_m

def sample(self, sample_shape, **kwargs):
return self.proposal.sample((sample_shape,), **kwargs)

def log_prob(self, theta, **kwargs):
return self.proposal.log_prob(theta) + self.max_log_ratio + self.log_m

scaled_proposal = ScaledProposal(
proposal, max_log_ratio, torch.log(torch.as_tensor(m))
)

samples, _ = rejection_sample(
potential_fn, scaled_proposal, num_samples=num_samples
)
return samples

# Legacy batched rejection loop, superseded by the `rejection_sample`
# call above (unreachable after the early return).
num_accepted = 0
all_ = []
while num_accepted < num_samples:
    candidates = proposal.sample((sampling_batch_size,))
    target_log_probs = potential_fn(candidates)
    proposal_log_probs = proposal.log_prob(candidates) + max_log_ratio
    target_proposal_ratio = exp(target_log_probs - proposal_log_probs)
    acceptance = rand(target_proposal_ratio.shape)
    accepted = candidates[target_proposal_ratio > acceptance]
    num_accepted += accepted.shape[0]
    all_.append(accepted)
samples = torch.cat(all_)[:num_samples]
return samples
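# --- Illustration (self-contained toy version) ---------------------------------
# The same accept/reject rule as the legacy loop above, written from scratch:
# accept theta with probability target(theta) / (M * proposal(theta)), which is
# valid whenever M * proposal(theta) >= target(theta) everywhere. All names are
# local to this sketch.
import torch

def toy_rejection_sample(log_target, proposal, num_samples, log_M, batch_size=10_000):
    accepted, num_accepted = [], 0
    while num_accepted < num_samples:
        candidates = proposal.sample((batch_size,))
        log_ratio = log_target(candidates) - (proposal.log_prob(candidates) + log_M)
        mask = torch.rand(batch_size) < log_ratio.exp()
        accepted.append(candidates[mask])
        num_accepted += int(mask.sum())
    return torch.cat(accepted)[:num_samples]

# Example: standard normal target under a wider normal proposal. The density
# ratio peaks at 0 with value 3.0, so M = 3.5 is a valid envelope constant.
proposal = torch.distributions.Normal(0.0, 3.0)
log_target = torch.distributions.Normal(0.0, 1.0).log_prob
samples = toy_rejection_sample(log_target, proposal, 1_000, torch.log(torch.tensor(3.5)))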

def sample_conditional(
self,
sample_shape: Shape,
@@ -506,7 +417,7 @@ def __call__(
likelihood_nn: Neural likelihood estimator that can be evaluated.
x: Conditioning variable for posterior $p(\theta|x)$. Can be a batch of iid
x.
mcmc_method: One of `slice_np`, `slice`, `hmc` or `nuts`.
method: One of `slice_np`, `slice`, `hmc`, `nuts`, or `rejection`.
Returns:
Potential function for sampler.
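# --- Illustration (assumed call pattern) ----------------------------------------
# How the returned callable is consumed; inferred from the call sites in the
# diff above (`potential_fn_provider(self._prior, self.net, x, "rejection")`).
potential_fn = potential_fn_provider(prior, likelihood_nn, x, "rejection")
log_potential = potential_fn(theta)  # unnormalized log-probability of theta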
