diff --git a/pymc3/backends/report.py b/pymc3/backends/report.py
index 52ca87590f4..8db6705dc3a 100644
--- a/pymc3/backends/report.py
+++ b/pymc3/backends/report.py
@@ -102,11 +102,17 @@ def _run_convergence_checks(self, trace):
             warn = SamplerWarning(
                 WarningType.CONVERGENCE, msg, 'error', None, None, effective_n)
             warnings.append(warn)
+        elif eff_min / n_samples < 0.1:
+            msg = ("The number of effective samples is smaller than "
+                   "10% for some parameters.")
+            warn = SamplerWarning(
+                WarningType.CONVERGENCE, msg, 'warn', None, None, effective_n)
+            warnings.append(warn)
         elif eff_min / n_samples < 0.25:
             msg = ("The number of effective samples is smaller than "
                    "25% for some parameters.")
             warn = SamplerWarning(
-                WarningType.CONVERGENCE, msg, 'warn', None, None, effective_n)
+                WarningType.CONVERGENCE, msg, 'info', None, None, effective_n)
             warnings.append(warn)
 
         self._add_warnings(warnings)
diff --git a/pymc3/step_methods/hmc/base_hmc.py b/pymc3/step_methods/hmc/base_hmc.py
index aebb401d343..319dfe99cb2 100644
--- a/pymc3/step_methods/hmc/base_hmc.py
+++ b/pymc3/step_methods/hmc/base_hmc.py
@@ -177,8 +177,9 @@ def warnings(self, strace):
                 WarningType.DIVERGENCES, msg, 'error', None, None, None)
             warnings.append(warning)
         elif n_divs > 0:
-            message = ('Divergences after tuning. Increase `target_accept` or '
-                       'reparameterize.')
+            message = ('There were %s divergences after tuning. Increase '
+                       '`target_accept` or reparameterize.'
+                       % n_divs)
             warning = SamplerWarning(
                 WarningType.DIVERGENCES, message, 'error', None, None, None)
             warnings.append(warning)
diff --git a/pymc3/step_methods/hmc/nuts.py b/pymc3/step_methods/hmc/nuts.py
index 6a5815743fe..f4925def3c8 100644
--- a/pymc3/step_methods/hmc/nuts.py
+++ b/pymc3/step_methods/hmc/nuts.py
@@ -170,7 +170,8 @@ def _hamiltonian_step(self, start, p0, step_size):
             if divergence_info or turning:
                 break
         else:
-            self._reached_max_treedepth += 1
+            if not self.tune:
+                self._reached_max_treedepth += 1
 
         stats = tree.stats()
         accept_stat = stats['mean_tree_accept']
@@ -185,8 +186,10 @@ def competence(var, has_grad):
 
     def warnings(self, strace):
         warnings = super(NUTS, self).warnings(strace)
+        n_samples = self._samples_after_tune
+        n_treedepth = self._reached_max_treedepth
 
-        if np.mean(self._reached_max_treedepth) > 0.05:
+        if n_samples > 0 and n_treedepth / float(n_samples) > 0.05:
             msg = ('The chain reached the maximum tree depth. Increase '
                    'max_treedepth, increase target_accept or reparameterize.')
             warn = SamplerWarning(WarningType.TREEDEPTH, msg, 'warn',
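
Note (not part of the patch): the report.py hunk turns the single effective-sample-size check into a tiered one. Below is a minimal standalone sketch of the resulting severity mapping, assuming a hypothetical helper name `ess_warning_level` and ignoring the pre-existing 'error' branch for very low effective sample counts.

    def ess_warning_level(eff_min, n_samples):
        # Mirrors only the two `elif` branches visible in the report.py hunk;
        # the earlier 'error' branch is intentionally left out of this sketch.
        ratio = eff_min / n_samples
        if ratio < 0.1:
            # New in this patch: below 10% effective samples -> 'warn'
            return 'warn'
        elif ratio < 0.25:
            # Existing 10-25% band, downgraded from 'warn' to 'info'
            return 'info'
        return None

In this sketch, eff_min=80 with n_samples=1000 (ratio 0.08) maps to 'warn', while a ratio of 0.2 maps only to 'info'.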