feat: Propagate model parameter names to optimizers #1536

Merged: 8 commits, Jul 29, 2021
32 changes: 29 additions & 3 deletions src/pyhf/optimize/mixins.py
@@ -29,11 +29,23 @@ def __init__(self, **kwargs):
            )

    def _internal_minimize(
-        self, func, x0, do_grad=False, bounds=None, fixed_vals=None, options={}
+        self,
+        func,
+        x0,
+        do_grad=False,
+        bounds=None,
+        fixed_vals=None,
+        options={},
+        par_names=None,
    ):

        minimizer = self._get_minimizer(
-            func, x0, bounds, fixed_vals=fixed_vals, do_grad=do_grad
+            func,
+            x0,
+            bounds,
+            fixed_vals=fixed_vals,
+            do_grad=do_grad,
+            par_names=par_names,
        )
        result = self._minimize(
            minimizer,
@@ -157,7 +169,21 @@ def minimize(
            do_stitch=do_stitch,
        )

-        result = self._internal_minimize(**minimizer_kwargs, options=kwargs)
+        # handle non-pyhf ModelConfigs
+        try:
+            par_names = pdf.config.par_names()
+        except AttributeError:
+            par_names = None
+
+        # need to remove parameters that are fixed in the fit
+        if par_names and do_stitch and fixed_vals:
+            for index, _ in fixed_vals:
+                par_names[index] = None
+            par_names = [name for name in par_names if name]
+
+        result = self._internal_minimize(
+            **minimizer_kwargs, options=kwargs, par_names=par_names
+        )
        result = self._internal_postprocess(
            result, stitch_pars, return_uncertainties=return_uncertainties
        )
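The name-stripping block above keeps the names aligned with the parameter vector that `do_stitch` hands to the minimizer: when a fixed parameter is stitched out of the fit, its name has to be dropped as well so the remaining names still line up index by index. A minimal standalone sketch of that bookkeeping (toy values, not pyhf API):

    # hypothetical 3-parameter model; the parameter at index 1 is fixed in the fit
    par_names = ['mu', 'uncorr_bkguncrt[0]', 'uncorr_bkguncrt[1]']
    fixed_vals = [(1, 1.0)]  # (index, value) pairs, as in fixed_vals above

    # same logic as the diff: blank out fixed entries, then drop them
    for index, _ in fixed_vals:
        par_names[index] = None
    par_names = [name for name in par_names if name]

    print(par_names)  # ['mu', 'uncorr_bkguncrt[1]'] -- matches the stitched parameter vector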
10 changes: 8 additions & 2 deletions src/pyhf/optimize/opt_minuit.py
@@ -40,7 +40,13 @@ def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def _get_minimizer(
-        self, objective_and_grad, init_pars, init_bounds, fixed_vals=None, do_grad=False
+        self,
+        objective_and_grad,
+        init_pars,
+        init_bounds,
+        fixed_vals=None,
+        do_grad=False,
+        par_names=None,
    ):

        step_sizes = [(b[1] - b[0]) / float(self.steps) for b in init_bounds]
@@ -60,7 +66,7 @@
            wrapped_objective = objective_and_grad
            jac = None

-        minuit = iminuit.Minuit(wrapped_objective, init_pars, grad=jac)
+        minuit = iminuit.Minuit(wrapped_objective, init_pars, grad=jac, name=par_names)
        minuit.errors = step_sizes
        minuit.limits = init_bounds
        minuit.fixed = fixed_bools
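The only functional change here is forwarding `par_names` to `iminuit.Minuit` through its `name` keyword; with `name=None` iminuit falls back to its auto-generated `x0`, `x1`, ... labels. A short sketch of that behaviour in isolation (toy objective standing in for the pyhf NLL, not the actual pyhf code path):

    import iminuit
    import numpy as np

    def objective(pars):
        # stand-in for the wrapped pyhf objective: a simple quadratic
        return float(np.sum((np.asarray(pars) - 1.0) ** 2))

    # explicit names: results are reported per parameter name
    minuit = iminuit.Minuit(objective, [1.0, 1.0], name=['mu', 'uncorr_bkguncrt[0]'])
    minuit.errordef = iminuit.Minuit.LEAST_SQUARES
    minuit.migrad()
    print(minuit.parameters)  # ('mu', 'uncorr_bkguncrt[0]')

    # name=None (the fallback when a ModelConfig has no par_names): auto names
    minuit = iminuit.Minuit(objective, [1.0, 1.0], name=None)
    print(minuit.parameters)  # ('x0', 'x1')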
8 changes: 7 additions & 1 deletion src/pyhf/optimize/opt_scipy.py
@@ -31,7 +31,13 @@ def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def _get_minimizer(
-        self, objective_and_grad, init_pars, init_bounds, fixed_vals=None, do_grad=False
+        self,
+        objective_and_grad,
+        init_pars,
+        init_bounds,
+        fixed_vals=None,
+        do_grad=False,
+        par_names=None,
    ):
        return scipy.optimize.minimize

30 changes: 30 additions & 0 deletions src/pyhf/pdf.py
@@ -319,6 +319,36 @@ def par_slice(self, name):
        """
        return self.par_map[name]['slice']

+    def par_names(self, fstring='{name}[{index}]'):
+        """
+        The names of the parameters in the model including binned-parameter indexing.
+
+        Args:
+            fstring (:obj:`str`): Format string for the parameter names using `name` and `index` variables. Default: '{name}[{index}]'.

+        Returns:
+            :obj:`list`: Names of the model parameters.
+
+        Example:
+            >>> import pyhf
+            >>> model = pyhf.simplemodels.uncorrelated_background(
+            ...     signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0]
+            ... )
+            >>> model.config.par_names()
+            ['mu', 'uncorr_bkguncrt[0]', 'uncorr_bkguncrt[1]']
+            >>> model.config.par_names(fstring='{name}_{index}')
+            ['mu', 'uncorr_bkguncrt_0', 'uncorr_bkguncrt_1']
+        """
+        _names = []
+        for name in self.par_order:
+            _npars = self.param_set(name).n_parameters
+            if _npars == 1:
+                _names.append(name)
+                continue
+
+            _names.extend([fstring.format(name=name, index=i) for i in range(_npars)])
+        return _names
+
    def param_set(self, name):
        """
        The :class:`~pyhf.parameters.paramset` for the model parameter ``name``.
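Beyond the optimizer integration, `par_names` also gives a convenient way to label fit results by hand. A minimal sketch of pairing best-fit values with the generated names (observed data values here are chosen arbitrarily for illustration):

    import pyhf

    model = pyhf.simplemodels.uncorrelated_background(
        signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0]
    )
    data = [62.0, 63.0] + model.config.auxdata

    bestfit = pyhf.infer.mle.fit(data, model)
    # pair each fitted value with its human-readable name
    for name, value in zip(model.config.par_names(), bestfit):
        print(f'{name}: {value:.3f}')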
14 changes: 14 additions & 0 deletions tests/test_optim.py
@@ -573,3 +573,17 @@ def test_bad_solver_options_scipy(mocker):
    model = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + model.config.auxdata)
    assert pyhf.infer.mle.fit(data, model).tolist()
+
+
+def test_minuit_param_names(mocker):
+    pyhf.set_backend('numpy', 'minuit')
+    pdf = pyhf.simplemodels.uncorrelated_background([5], [10], [3.5])
+    data = [10] + pdf.config.auxdata
+    _, result = pyhf.infer.mle.fit(data, pdf, return_result_obj=True)
+    assert 'minuit' in result
+    assert result.minuit.parameters == ('mu', 'uncorr_bkguncrt')
+
+    pdf.config.par_names = mocker.Mock(return_value=None)
+    _, result = pyhf.infer.mle.fit(data, pdf, return_result_obj=True)
+    assert 'minuit' in result
+    assert result.minuit.parameters == ('x0', 'x1')
6 changes: 6 additions & 0 deletions tests/test_simplemodels.py
@@ -13,6 +13,7 @@ def test_correlated_background():
    assert model.config.channels == ["single_channel"]
    assert model.config.samples == ["background", "signal"]
    assert model.config.par_order == ["mu", "correlated_bkg_uncertainty"]
+    assert model.config.par_names() == ['mu', 'correlated_bkg_uncertainty']
    assert model.config.suggested_init() == [1.0, 0.0]


@@ -23,6 +24,11 @@ def test_uncorrelated_background():
    assert model.config.channels == ["singlechannel"]
    assert model.config.samples == ["background", "signal"]
    assert model.config.par_order == ["mu", "uncorr_bkguncrt"]
+    assert model.config.par_names() == [
+        'mu',
+        'uncorr_bkguncrt[0]',
+        'uncorr_bkguncrt[1]',
+    ]
    assert model.config.suggested_init() == [1.0, 1.0, 1.0]

